Dec 10 06:47:59 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 10 06:47:59 crc restorecon[4759]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 06:47:59 crc restorecon[4759]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc 
restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 06:47:59 crc 
restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 06:47:59 crc restorecon[4759]: 
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:47:59 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 
06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc 
restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 06:48:00 crc restorecon[4759]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 06:48:00 crc restorecon[4759]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 10 06:48:00 crc kubenswrapper[4765]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 10 06:48:00 crc kubenswrapper[4765]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 10 06:48:00 crc kubenswrapper[4765]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 10 06:48:00 crc kubenswrapper[4765]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 10 06:48:00 crc kubenswrapper[4765]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 10 06:48:00 crc kubenswrapper[4765]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.397999 4765 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400891 4765 feature_gate.go:330] unrecognized feature gate: Example Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400910 4765 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400915 4765 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400919 4765 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400924 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400930 4765 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400935 4765 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400942 4765 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400948 4765 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400957 4765 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400963 4765 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400969 4765 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400974 4765 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400979 4765 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400985 4765 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400990 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400994 4765 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.400998 4765 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401003 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401007 4765 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401011 4765 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401016 4765 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401020 4765 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401027 4765 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401031 4765 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401035 4765 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401040 4765 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401045 4765 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401049 4765 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401053 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401057 4765 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401061 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401064 4765 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401069 4765 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401074 4765 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401078 4765 feature_gate.go:330] 
unrecognized feature gate: AWSClusterHostedDNS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401105 4765 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401110 4765 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401115 4765 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401119 4765 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401124 4765 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401129 4765 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401133 4765 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401138 4765 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401142 4765 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401147 4765 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401152 4765 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401158 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401163 4765 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401168 4765 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401172 4765 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401177 4765 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401181 4765 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401185 4765 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401189 4765 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401195 4765 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401200 4765 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401204 4765 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401209 4765 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401213 4765 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401217 4765 feature_gate.go:330] 
unrecognized feature gate: GatewayAPI Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401221 4765 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401226 4765 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401230 4765 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401234 4765 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401238 4765 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401242 4765 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401247 4765 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401250 4765 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401254 4765 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.401260 4765 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401337 4765 flags.go:64] FLAG: --address="0.0.0.0" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401347 4765 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401357 4765 flags.go:64] FLAG: --anonymous-auth="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401363 4765 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401369 4765 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401374 4765 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401380 4765 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401386 4765 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401390 4765 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401395 4765 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401399 4765 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401404 4765 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401409 4765 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401413 4765 flags.go:64] FLAG: --cgroup-root="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401417 4765 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401422 4765 flags.go:64] FLAG: --client-ca-file="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401426 4765 flags.go:64] FLAG: --cloud-config="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401430 4765 
flags.go:64] FLAG: --cloud-provider="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401435 4765 flags.go:64] FLAG: --cluster-dns="[]" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401439 4765 flags.go:64] FLAG: --cluster-domain="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401444 4765 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401448 4765 flags.go:64] FLAG: --config-dir="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401453 4765 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401458 4765 flags.go:64] FLAG: --container-log-max-files="5" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401465 4765 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401470 4765 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401475 4765 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401480 4765 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401484 4765 flags.go:64] FLAG: --contention-profiling="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401489 4765 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401494 4765 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401498 4765 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401502 4765 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401509 4765 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401515 4765 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401520 4765 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401525 4765 flags.go:64] FLAG: --enable-load-reader="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401530 4765 flags.go:64] FLAG: --enable-server="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401535 4765 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401541 4765 flags.go:64] FLAG: --event-burst="100" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401545 4765 flags.go:64] FLAG: --event-qps="50" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401550 4765 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401554 4765 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401558 4765 flags.go:64] FLAG: --eviction-hard="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401564 4765 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401568 4765 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401573 4765 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 
06:48:00.401577 4765 flags.go:64] FLAG: --eviction-soft="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401581 4765 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401585 4765 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401589 4765 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401594 4765 flags.go:64] FLAG: --experimental-mounter-path="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401598 4765 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401602 4765 flags.go:64] FLAG: --fail-swap-on="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401606 4765 flags.go:64] FLAG: --feature-gates="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401611 4765 flags.go:64] FLAG: --file-check-frequency="20s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401615 4765 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401620 4765 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401624 4765 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401630 4765 flags.go:64] FLAG: --healthz-port="10248" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401635 4765 flags.go:64] FLAG: --help="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401639 4765 flags.go:64] FLAG: --hostname-override="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401644 4765 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401648 4765 flags.go:64] FLAG: --http-check-frequency="20s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401652 4765 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401657 4765 flags.go:64] FLAG: --image-credential-provider-config="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401661 4765 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401666 4765 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401671 4765 flags.go:64] FLAG: --image-service-endpoint="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401675 4765 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401679 4765 flags.go:64] FLAG: --kube-api-burst="100" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401683 4765 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401687 4765 flags.go:64] FLAG: --kube-api-qps="50" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401692 4765 flags.go:64] FLAG: --kube-reserved="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401697 4765 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401701 4765 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401705 4765 flags.go:64] FLAG: --kubelet-cgroups="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401709 4765 flags.go:64] 
FLAG: --local-storage-capacity-isolation="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401714 4765 flags.go:64] FLAG: --lock-file="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401718 4765 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401724 4765 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401728 4765 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401735 4765 flags.go:64] FLAG: --log-json-split-stream="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401740 4765 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401744 4765 flags.go:64] FLAG: --log-text-split-stream="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401748 4765 flags.go:64] FLAG: --logging-format="text" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401753 4765 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401757 4765 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401762 4765 flags.go:64] FLAG: --manifest-url="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401767 4765 flags.go:64] FLAG: --manifest-url-header="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401773 4765 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401778 4765 flags.go:64] FLAG: --max-open-files="1000000" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401784 4765 flags.go:64] FLAG: --max-pods="110" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401789 4765 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401794 4765 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401799 4765 flags.go:64] FLAG: --memory-manager-policy="None" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401805 4765 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401810 4765 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401815 4765 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401820 4765 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401830 4765 flags.go:64] FLAG: --node-status-max-images="50" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401834 4765 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401839 4765 flags.go:64] FLAG: --oom-score-adj="-999" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401843 4765 flags.go:64] FLAG: --pod-cidr="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401847 4765 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401855 4765 flags.go:64] FLAG: --pod-manifest-path="" Dec 10 06:48:00 crc 
kubenswrapper[4765]: I1210 06:48:00.401860 4765 flags.go:64] FLAG: --pod-max-pids="-1" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401865 4765 flags.go:64] FLAG: --pods-per-core="0" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401870 4765 flags.go:64] FLAG: --port="10250" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401875 4765 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401880 4765 flags.go:64] FLAG: --provider-id="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401884 4765 flags.go:64] FLAG: --qos-reserved="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401889 4765 flags.go:64] FLAG: --read-only-port="10255" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401893 4765 flags.go:64] FLAG: --register-node="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401898 4765 flags.go:64] FLAG: --register-schedulable="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401902 4765 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401913 4765 flags.go:64] FLAG: --registry-burst="10" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401918 4765 flags.go:64] FLAG: --registry-qps="5" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401922 4765 flags.go:64] FLAG: --reserved-cpus="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401926 4765 flags.go:64] FLAG: --reserved-memory="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401932 4765 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401936 4765 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401941 4765 flags.go:64] FLAG: --rotate-certificates="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401945 4765 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401950 4765 flags.go:64] FLAG: --runonce="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401954 4765 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401959 4765 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401963 4765 flags.go:64] FLAG: --seccomp-default="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401968 4765 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401972 4765 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401977 4765 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401982 4765 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401987 4765 flags.go:64] FLAG: --storage-driver-password="root" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401991 4765 flags.go:64] FLAG: --storage-driver-secure="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.401995 4765 flags.go:64] FLAG: --storage-driver-table="stats" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402000 4765 flags.go:64] FLAG: --storage-driver-user="root" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402004 4765 flags.go:64] FLAG: 
--streaming-connection-idle-timeout="4h0m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402009 4765 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402013 4765 flags.go:64] FLAG: --system-cgroups="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402019 4765 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402028 4765 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402033 4765 flags.go:64] FLAG: --tls-cert-file="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402038 4765 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402045 4765 flags.go:64] FLAG: --tls-min-version="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402050 4765 flags.go:64] FLAG: --tls-private-key-file="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402056 4765 flags.go:64] FLAG: --topology-manager-policy="none" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402062 4765 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402067 4765 flags.go:64] FLAG: --topology-manager-scope="container" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402073 4765 flags.go:64] FLAG: --v="2" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402100 4765 flags.go:64] FLAG: --version="false" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402107 4765 flags.go:64] FLAG: --vmodule="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402112 4765 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402117 4765 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402247 4765 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402253 4765 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402258 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402262 4765 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402266 4765 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402270 4765 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402274 4765 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402280 4765 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402283 4765 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402287 4765 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402291 4765 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402295 4765 feature_gate.go:330] unrecognized feature gate: 
IngressControllerLBSubnetsAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402299 4765 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402303 4765 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402307 4765 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402311 4765 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402315 4765 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402319 4765 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402323 4765 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402327 4765 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402331 4765 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402336 4765 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402341 4765 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402346 4765 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402351 4765 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402356 4765 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402360 4765 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402365 4765 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402377 4765 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402382 4765 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402386 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402389 4765 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402393 4765 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402397 4765 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402401 4765 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402405 4765 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402410 4765 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
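The FLAG dump that precedes this warning run records the kubelet's entire effective flag set in a uniform flags.go:64] FLAG: --name="value" shape, which makes it straightforward to scrape. Below is a hypothetical Go helper for pulling those pairs out of a log such as this one; the regex is my own guess at the format, not part of any kubelet tooling. The gate warnings continue after it.

// Illustrative only: extract `FLAG: --name="value"` pairs from kubelet log
// text like the dump above. The regex is an assumption tailored to the
// format seen here, not an official parser.
package main

import (
	"bufio"
	"fmt"
	"regexp"
	"strings"
)

var flagRe = regexp.MustCompile(`FLAG: --([a-z0-9-]+)="((?:[^"\\]|\\.)*)"`)

func parseFlags(logText string) map[string]string {
	flags := make(map[string]string)
	sc := bufio.NewScanner(strings.NewReader(logText))
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // lines in this log are long
	for sc.Scan() {
		for _, m := range flagRe.FindAllStringSubmatch(sc.Text(), -1) {
			flags[m[1]] = m[2]
		}
	}
	return flags
}

func main() {
	sample := `I1210 06:48:00.401475 4765 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"`
	for name, val := range parseFlags(sample) {
		fmt.Printf("%s = %q\n", name, val)
	}
}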
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402414 4765 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402418 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402423 4765 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402427 4765 feature_gate.go:330] unrecognized feature gate: Example Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402431 4765 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402435 4765 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402439 4765 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402442 4765 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402446 4765 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402449 4765 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402453 4765 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402456 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402460 4765 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402463 4765 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402468 4765 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402473 4765 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402476 4765 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402481 4765 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402485 4765 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402495 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402499 4765 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402503 4765 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402507 4765 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402510 4765 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402514 4765 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402518 4765 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402521 4765 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402526 4765 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402530 4765 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402534 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402537 4765 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402541 4765 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402545 4765 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.402549 4765 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.402556 4765 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.414300 4765 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.414378 4765 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414496 4765 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414510 4765 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414519 4765 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
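Each pass above follows the same pattern and ends in a resolved feature gates: {map[...]} line: names this binary does not register are warned about as unrecognized, known GA or deprecated gates produce the Setting ... feature gate notices, and the survivors are applied. A small Go sketch of that warn-and-filter behavior as observed here; it models the log output only and is not the Kubernetes feature_gate.go implementation. The same pass repeats in the entries that follow.

// Sketch of the warn-and-filter behavior visible in the log above: gates the
// binary knows about are applied, everything else is warned about and
// dropped. This models the observable behavior only; it is not the actual
// Kubernetes feature_gate.go code.
package main

import "fmt"

func applyGates(known map[string]bool, requested map[string]bool) map[string]bool {
	effective := make(map[string]bool, len(known))
	for name, def := range known {
		effective[name] = def
	}
	for name, val := range requested {
		if _, ok := known[name]; !ok {
			fmt.Printf("W unrecognized feature gate: %s\n", name)
			continue
		}
		effective[name] = val
	}
	return effective
}

func main() {
	known := map[string]bool{"CloudDualStackNodeIPs": true, "KMSv1": false}
	requested := map[string]bool{"KMSv1": true, "GatewayAPI": true} // GatewayAPI is unknown to this binary
	fmt.Println(applyGates(known, requested))
}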
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414531 4765 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414539 4765 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414545 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414551 4765 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414557 4765 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414563 4765 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414571 4765 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414577 4765 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414583 4765 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414589 4765 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414595 4765 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414600 4765 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414607 4765 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414614 4765 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414621 4765 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414628 4765 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414636 4765 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414643 4765 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414649 4765 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414654 4765 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414660 4765 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414666 4765 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414671 4765 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414677 4765 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414683 4765 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 10 06:48:00 
crc kubenswrapper[4765]: W1210 06:48:00.414688 4765 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414693 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414701 4765 feature_gate.go:330] unrecognized feature gate: Example Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414709 4765 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414718 4765 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414727 4765 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414738 4765 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414746 4765 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414754 4765 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414761 4765 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414769 4765 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414776 4765 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414784 4765 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414791 4765 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414798 4765 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414805 4765 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414813 4765 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414821 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414831 4765 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414839 4765 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414848 4765 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414855 4765 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414863 4765 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414870 4765 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414877 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414884 4765 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414891 4765 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414897 4765 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414904 4765 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414910 4765 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414917 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414923 4765 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414930 4765 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414937 4765 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414943 4765 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414950 4765 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414957 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414963 4765 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414972 4765 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414979 4765 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414985 4765 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.414992 4765 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415001 4765 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.415015 4765 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false 
KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415287 4765 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415308 4765 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415316 4765 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415324 4765 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415332 4765 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415340 4765 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415348 4765 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415356 4765 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415364 4765 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415373 4765 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415379 4765 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415388 4765 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415395 4765 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415402 4765 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415409 4765 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415416 4765 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415423 4765 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415430 4765 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415437 4765 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415445 4765 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415451 4765 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415458 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415464 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 
06:48:00.415471 4765 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415480 4765 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415490 4765 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415498 4765 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415505 4765 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415516 4765 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415525 4765 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415532 4765 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415539 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415546 4765 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415555 4765 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415565 4765 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415572 4765 feature_gate.go:330] unrecognized feature gate: Example Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415580 4765 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415588 4765 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415597 4765 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415607 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415618 4765 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415625 4765 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415631 4765 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415638 4765 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415645 4765 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415652 4765 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415659 4765 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415665 4765 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415672 4765 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415679 4765 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415685 4765 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415692 4765 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415699 4765 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415706 4765 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415715 4765 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415721 4765 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415728 4765 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415735 4765 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415741 4765 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415747 4765 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415754 4765 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415761 4765 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415767 4765 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415774 4765 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415780 4765 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415787 4765 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415794 4765 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415801 4765 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415808 4765 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415815 4765 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.415823 4765 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.415836 4765 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.416235 4765 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.421291 4765 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.421477 4765 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.422269 4765 server.go:997] "Starting client certificate rotation"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.422315 4765 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.422521 4765 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-01 21:40:38.403664535 +0000 UTC
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.422622 4765 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.429164 4765 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.433642 4765 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.434717 4765 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.441719 4765 log.go:25] "Validated CRI v1 runtime API"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.460436 4765 log.go:25] "Validated CRI v1 image API"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.462022 4765 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.465047 4765 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-10-06-43-20-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.465167 4765 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.481248 4765 manager.go:217] Machine: {Timestamp:2025-12-10 06:48:00.479466102 +0000 UTC m=+0.206131438 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:f84f1526-209e-4d0d-8c67-84d36e1af992 BootID:0e72ffd7-59d0-4884-9bfb-94943bbc5155 Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:f8:50:ae Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:f8:50:ae Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:d0:b3:c5 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:ea:63:36 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:ac:59:71 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:3d:66:d8 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:63:1c:a2 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:1e:4e:65:f8:4e:fa Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:26:cb:b3:ef:8f:92 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.481860 4765 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.482126 4765 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.482580 4765 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.482764 4765 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.482806 4765 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.483109 4765 topology_manager.go:138] "Creating topology manager with none policy"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.483123 4765 container_manager_linux.go:303] "Creating device plugin manager"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.483277 4765 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.483311 4765 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.483612 4765 state_mem.go:36] "Initialized new in-memory state store"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.483701 4765 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.484352 4765 kubelet.go:418] "Attempting to sync node with API server"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.484372 4765 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.484394 4765 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.484408 4765 kubelet.go:324] "Adding apiserver pod source"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.484419 4765 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.487264 4765 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.487346 4765 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused
Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.487455 4765 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.487609 4765 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.487614 4765 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused
Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.487695 4765 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.489517 4765 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.490417 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.490509 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.490572 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.490640 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.490707 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.490764 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.490829 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.490894 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.490954 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.491011 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.491071 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.491150 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.491401 4765 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.492266 4765 server.go:1280] "Started kubelet"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.492384 4765 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.492726 4765 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.493237 4765 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.493808 4765 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.494420 4765 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.41:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187fc7d1403b1552 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 06:48:00.49222997 +0000 UTC m=+0.218895296,LastTimestamp:2025-12-10 06:48:00.49222997 +0000 UTC m=+0.218895296,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 10 06:48:00 crc systemd[1]: Started Kubernetes Kubelet.
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.495782 4765 server.go:460] "Adding debug handlers to kubelet server"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.498527 4765 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.498627 4765 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.498640 4765 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 10:33:16.667868904 +0000 UTC
Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.498814 4765 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.498870 4765 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.498911 4765 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.498939 4765 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.499336 4765 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused
Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.499407 4765 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError"
Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.499327 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="200ms"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.499681 4765 factory.go:55] Registering systemd factory
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.499708 4765 factory.go:221] Registration of the systemd container factory successfully
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.500284 4765 factory.go:153] Registering CRI-O factory
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.500317 4765 factory.go:221] Registration of the crio container factory successfully
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.500413 4765 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.500443 4765 factory.go:103] Registering Raw factory
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.500462 4765 manager.go:1196] Started watching for new ooms in manager
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.503622 4765 manager.go:319] Starting recovery of all containers
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.508992 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509049 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509064 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509080 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509134 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509148 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509158 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509168 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509185 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509197 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509211 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509222 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509235 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509248 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509264 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509277 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509288 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509310 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509326 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509344 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509358 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509371 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509384 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509395 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509409 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509419 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509439 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509452 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509466 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509478 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509544 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509560 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509573 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509634 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509652 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509676 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509691 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509704 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509722 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509735 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509750 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509761 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509775 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509833 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509848 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509868 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509883 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509896 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509915 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509929 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509942 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509957 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.509986 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510006 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510029 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510050 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510066 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510104 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510121 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510136 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510152 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510166 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510186 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510201 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510213 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510231 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510244 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510261 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510281 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510296 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510312 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510325 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510340 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510351 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510365 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510384 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510396 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510408 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510424 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510437 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510454 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510468 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510481 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510495 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510507 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510523 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510535 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510549 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510563 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510575 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510594 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510607 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510619 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510634 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510646 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510660 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510671 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510683 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510699 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510712 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510727 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510741 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510753 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510768 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510788 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510808 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510826 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510843 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510854 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510869 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510886 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510897 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510912 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510925 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510938 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510952 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510963 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510975 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510984 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.510998 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511008 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511020 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511031 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511041 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511053 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511063 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511072 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511099 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511110 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511123 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c"
volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511135 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511144 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511157 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511168 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511181 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511191 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511201 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511213 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511227 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511242 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511255 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511268 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511281 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511292 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511304 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511317 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511328 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511342 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511353 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511365 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511375 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511386 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511399 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511412 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511423 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511437 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511446 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511465 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511475 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511486 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511499 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511509 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511525 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511535 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511546 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511558 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511573 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511585 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511595 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511606 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511620 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511632 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511647 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511658 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511670 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511684 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511694 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511707 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511716 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511726 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511739 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511749 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511759 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511776 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511785 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511799 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511813 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511823 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.511836 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513586 4765 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513675 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513717 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513735 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513754 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513775 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513794 4765 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513815 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513832 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513848 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513870 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513887 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.513939 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.514650 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.514697 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.514715 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.514731 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.514748 4765 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.514768 4765 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.514780 4765 reconstruct.go:97] "Volume reconstruction finished" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.514791 4765 reconciler.go:26] "Reconciler: start to sync state" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.523474 4765 manager.go:324] Recovery completed Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.535610 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.537320 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.537356 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.537367 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.538059 4765 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.538075 4765 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.538110 4765 state_mem.go:36] "Initialized new in-memory state store" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.579440 4765 policy_none.go:49] "None policy: Start" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.581168 4765 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.581210 4765 state_mem.go:35] "Initializing new in-memory state store" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.585843 4765 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.587629 4765 kubelet_network_linux.go:50] "Initialized iptables rules." 
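Each entry above uses klog's native header: a severity letter (I/W/E/F), MMDD, wall-clock time with microseconds, the PID, and the file:line of the logging call, terminated by "]". A small Go sketch for splitting that header follows, using one of the error lines from this log as its test input; the program is illustrative, not part of any Kubernetes tooling.

// klogline.go - illustrative sketch: split the klog header format
// "Lmmdd hh:mm:ss.uuuuuu PID file:line] message" used by these entries.
package main

import (
	"fmt"
	"regexp"
)

var hdr = regexp.MustCompile(
	`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})\s+(\d+) ([\w./_-]+:\d+)\] (.*)$`)

func main() {
	line := `E1210 06:48:00.599454 4765 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"`
	m := hdr.FindStringSubmatch(line)
	if m == nil {
		panic("not a klog line")
	}
	fmt.Println("severity:", m[1]) // I=info, W=warning, E=error, F=fatal
	fmt.Println("date:    ", m[2]) // MMDD
	fmt.Println("time:    ", m[3])
	fmt.Println("pid:     ", m[4])
	fmt.Println("source:  ", m[5]) // file:line of the logging call
	fmt.Println("message: ", m[6])
}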
protocol="IPv6" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.587712 4765 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.587741 4765 kubelet.go:2335] "Starting kubelet main sync loop" Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.587849 4765 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 10 06:48:00 crc kubenswrapper[4765]: W1210 06:48:00.588424 4765 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.588469 4765 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.599454 4765 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.638345 4765 manager.go:334] "Starting Device Plugin manager" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.639052 4765 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.639101 4765 server.go:79] "Starting device plugin registration server" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.639836 4765 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.639856 4765 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.640029 4765 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.640156 4765 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.640170 4765 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.647859 4765 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.688879 4765 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.689012 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.690466 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.690554 4765 
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.690571 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.690855 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.690974 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.691027 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.692230 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.692259 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.692301 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.692362 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.692391 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.692401 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.692568 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.692709 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.692756 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.693537 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.693568 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.693580 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.693619 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.693635 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.693643 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.693767 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.693968 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.694208 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.694573 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.694612 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.694621 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.694729 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.694847 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.694883 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.695586 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.695607 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.695617 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.695708 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.695715 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.695818 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.695843 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.695757 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.695887 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.696218 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.696265 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.697223 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.697253 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.697262 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.705351 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="400ms"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717001 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717042 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717066 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717102 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717125 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717148 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717228 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717263 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717299 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717317 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717332 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717349 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717382 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717398 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.717414 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.740022 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.741324 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
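The VerifyControllerAttachedVolume entries above and the MountVolume/SetUp pairs below show the volume manager's reconciler driving the actual state of the world toward the desired state: any desired volume not yet in the actual state gets a mount operation, then a success record. The following is a deliberately tiny Go model of that compare-and-act pattern, written for illustration only and in no way the kubelet's actual implementation.

// reconcile_toy.go - a toy model (not kubelet code) of the pattern seen
// below: compare desired volumes against actual state and "mount"
// whatever is missing, logging each transition.
package main

import "fmt"

func reconcile(desired []string, actual map[string]bool) {
	for _, vol := range desired {
		if actual[vol] {
			continue // already in the actual state of the world
		}
		fmt.Printf("MountVolume started for volume %q\n", vol)
		// ... a real reconciler would perform the mount here ...
		actual[vol] = true
		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", vol)
	}
}

func main() {
	desired := []string{"cert-dir", "data-dir", "log-dir", "usr-local-bin"}
	actual := map[string]bool{"cert-dir": true} // e.g. reconstructed at startup
	reconcile(desired, actual)
}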
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.741379 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.741421 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.741457 4765 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.741974 4765 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.41:6443: connect: connection refused" node="crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.817990 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818036 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818060 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818108 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818132 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818154 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818174 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818211 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818181 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818220 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818194 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818280 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818286 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818258 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818256 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818344 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818325 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818427 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818521 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818459 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818619 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818677 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818703 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818730 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818761 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818739 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818778 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 10 06:48:00 crc kubenswrapper[4765]: I1210
06:48:00.818823 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818940 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.818905 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.831595 4765 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.41:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187fc7d1403b1552 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 06:48:00.49222997 +0000 UTC m=+0.218895296,LastTimestamp:2025-12-10 06:48:00.49222997 +0000 UTC m=+0.218895296,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.942533 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.944965 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.945014 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.945027 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:00 crc kubenswrapper[4765]: I1210 06:48:00.945061 4765 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 06:48:00 crc kubenswrapper[4765]: E1210 06:48:00.945729 4765 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.41:6443: connect: connection refused" node="crc" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.024678 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.030997 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 10 06:48:01 crc kubenswrapper[4765]: W1210 06:48:01.052754 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-67bb05c7cead748d4d52deed6bab4e0796024e520f24b146600b1c5bbd805c24 WatchSource:0}: Error finding container 67bb05c7cead748d4d52deed6bab4e0796024e520f24b146600b1c5bbd805c24: Status 404 returned error can't find the container with id 67bb05c7cead748d4d52deed6bab4e0796024e520f24b146600b1c5bbd805c24 Dec 10 06:48:01 crc kubenswrapper[4765]: W1210 06:48:01.055919 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-0244bbfbbaa58d44bf994676cce596573152c76897c3687c502323e2b9b74ee4 WatchSource:0}: Error finding container 0244bbfbbaa58d44bf994676cce596573152c76897c3687c502323e2b9b74ee4: Status 404 returned error can't find the container with id 0244bbfbbaa58d44bf994676cce596573152c76897c3687c502323e2b9b74ee4 Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.060217 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:48:01 crc kubenswrapper[4765]: W1210 06:48:01.078415 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-aa413a69db6ec8d66243e93c0d5fa5f5923649692de52576f73f46bf9065e3fc WatchSource:0}: Error finding container aa413a69db6ec8d66243e93c0d5fa5f5923649692de52576f73f46bf9065e3fc: Status 404 returned error can't find the container with id aa413a69db6ec8d66243e93c0d5fa5f5923649692de52576f73f46bf9065e3fc Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.085514 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.092613 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 06:48:01 crc kubenswrapper[4765]: W1210 06:48:01.101527 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c373ed0e9d8c285c357349cdd690141fa7a7ed3f2a8a02e8d4f6818a4bf682b5 WatchSource:0}: Error finding container c373ed0e9d8c285c357349cdd690141fa7a7ed3f2a8a02e8d4f6818a4bf682b5: Status 404 returned error can't find the container with id c373ed0e9d8c285c357349cdd690141fa7a7ed3f2a8a02e8d4f6818a4bf682b5 Dec 10 06:48:01 crc kubenswrapper[4765]: E1210 06:48:01.107146 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="800ms" Dec 10 06:48:01 crc kubenswrapper[4765]: W1210 06:48:01.109522 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-266fbc58cb75de8108999283f079702f25f379958f538bfc488baa70ac68dfd6 WatchSource:0}: Error finding container 266fbc58cb75de8108999283f079702f25f379958f538bfc488baa70ac68dfd6: Status 404 returned error can't find the container with id 266fbc58cb75de8108999283f079702f25f379958f538bfc488baa70ac68dfd6 Dec 10 06:48:01 crc kubenswrapper[4765]: W1210 06:48:01.338961 4765 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 10 06:48:01 crc kubenswrapper[4765]: E1210 06:48:01.339053 4765 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.346388 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.347421 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.347469 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.347483 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.347513 4765 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 06:48:01 crc kubenswrapper[4765]: E1210 06:48:01.348024 4765 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.41:6443: connect: connection refused" node="crc" Dec 10 06:48:01 crc kubenswrapper[4765]: W1210 06:48:01.395925 4765 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get 
"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 10 06:48:01 crc kubenswrapper[4765]: E1210 06:48:01.395999 4765 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.493230 4765 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.499389 4765 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 19:38:28.694762448 +0000 UTC Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.499463 4765 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 36h50m27.195303039s for next certificate rotation Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.593062 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530"} Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.593175 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c373ed0e9d8c285c357349cdd690141fa7a7ed3f2a8a02e8d4f6818a4bf682b5"} Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.594395 4765 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d" exitCode=0 Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.594471 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d"} Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.594489 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"aa413a69db6ec8d66243e93c0d5fa5f5923649692de52576f73f46bf9065e3fc"} Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.594581 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.595778 4765 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="cc204cb81c29a5156106504359f9e2e7aac7e25ccbbfafa885d90b04f36f38e9" exitCode=0 Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.595821 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"cc204cb81c29a5156106504359f9e2e7aac7e25ccbbfafa885d90b04f36f38e9"} Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.595835 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"67bb05c7cead748d4d52deed6bab4e0796024e520f24b146600b1c5bbd805c24"} Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.595844 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.595879 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.595890 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.595897 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.596529 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.596557 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.596566 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.597137 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.597545 4765 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="2d329e28854c2f00353adeec805c2657d942b0c7a2fb737fc741b7bb9645ee7e" exitCode=0 Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.597612 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"2d329e28854c2f00353adeec805c2657d942b0c7a2fb737fc741b7bb9645ee7e"} Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.597633 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"0244bbfbbaa58d44bf994676cce596573152c76897c3687c502323e2b9b74ee4"} Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.597682 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.598128 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.598159 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.598176 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.598281 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.598308 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.598320 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.599287 4765 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09" exitCode=0 Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.599328 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09"} Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.599391 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"266fbc58cb75de8108999283f079702f25f379958f538bfc488baa70ac68dfd6"} Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.599492 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.600292 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.600318 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:01 crc kubenswrapper[4765]: I1210 06:48:01.600327 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:01 crc kubenswrapper[4765]: W1210 06:48:01.855508 4765 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 10 06:48:01 crc kubenswrapper[4765]: E1210 06:48:01.855613 4765 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 10 06:48:01 crc kubenswrapper[4765]: E1210 06:48:01.908772 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="1.6s" Dec 10 06:48:02 crc kubenswrapper[4765]: W1210 06:48:02.099257 4765 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 10 06:48:02 crc kubenswrapper[4765]: E1210 06:48:02.099360 4765 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: 
Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.148219 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.149788 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.149875 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.149884 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.149910 4765 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 06:48:02 crc kubenswrapper[4765]: E1210 06:48:02.151877 4765 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.41:6443: connect: connection refused" node="crc" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.493718 4765 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.607412 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.607489 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.607870 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.607885 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.607897 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.608001 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.608490 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.608522 
4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.608539 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.609251 4765 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="52cf96507e19767cc05ec2b4317b5333d654f11d72dcfdbafbc3a07559dc71c8" exitCode=0 Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.609322 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"52cf96507e19767cc05ec2b4317b5333d654f11d72dcfdbafbc3a07559dc71c8"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.609424 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.610545 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.610570 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.610583 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.611899 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"340c3e06756b15ba13c80b32c52549da6521f472184cbf565f16665ddfff66ec"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.612023 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.613227 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.613257 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.613272 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.614740 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f12bd7ae1e74d43db676788d30203875f6682194c96ff3db0aed89adc94ec963"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.614769 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"643ed52b9915c3dd9ee250f2a7ab4b1b6edf81ad2bf22195f11f2100c7f13003"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.614779 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"58f060c79f0e4210821a5b5485b559551df6b2652557a3d09c13c02aad0f62e5"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 
06:48:02.614845 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.615520 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.615545 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.615556 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.619883 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.619922 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.619939 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6"} Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.619924 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.622824 4765 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 10 06:48:02 crc kubenswrapper[4765]: E1210 06:48:02.623797 4765 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.624728 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.624756 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.624766 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.636147 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.636354 4765 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.636397 4765 prober.go:107] "Probe 
failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": dial tcp 192.168.126.11:6443: connect: connection refused" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.651328 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 06:48:02 crc kubenswrapper[4765]: I1210 06:48:02.659134 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.624976 4765 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="160fc5bb9223833197e8de0c338359fe0abc5d0cb0639998ae993c1c6d17f405" exitCode=0 Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.625057 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"160fc5bb9223833197e8de0c338359fe0abc5d0cb0639998ae993c1c6d17f405"} Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.625078 4765 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.625139 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.625169 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.625716 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.626157 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.626184 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.626194 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.626228 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.626245 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.626256 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.626784 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.626817 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.626827 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.752007 4765 kubelet_node_status.go:401] "Setting node annotation to 
enable volume controller attach/detach" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.753036 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.753067 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.753079 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.753121 4765 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.753712 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.753830 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.754636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.754659 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:03 crc kubenswrapper[4765]: I1210 06:48:03.754669 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.631587 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5dfafc91dd81fc735fd85c1f08c2c79a25c1770b4d06055c9fd804f670ad6cf5"} Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.631636 4765 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.631649 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b029fdc5e4db0b2caf4af8084e233bc8a2c12fefdbefdc6129b2d6cfaabcef4a"} Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.631667 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"242b1af41a23126fd0f97ed46ddf935900ec5ec5b63b2ea02bb20cf21f6fb7ad"} Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.631677 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"968ac093e4144edc9a913cf3bf094e4e53bfbdb683d89f1295ff2e4e54a922c8"} Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.631687 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.631638 4765 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.631686 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bd71f6dc010649075f75e94e68e9ef048371493e64376909153e8f3d902ac129"} Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 
06:48:04.631774 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.631787 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.632899 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.632932 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.632945 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.633017 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.633052 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.633062 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.632945 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.633118 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:04 crc kubenswrapper[4765]: I1210 06:48:04.633129 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.247347 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.634273 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.634292 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.635358 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.635381 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.635391 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.635423 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.635445 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.635453 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.782278 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.782441 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.783473 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.783543 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:05 crc kubenswrapper[4765]: I1210 06:48:05.783554 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:06 crc kubenswrapper[4765]: I1210 06:48:06.000306 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 10 06:48:06 crc kubenswrapper[4765]: I1210 06:48:06.636922 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:06 crc kubenswrapper[4765]: I1210 06:48:06.637867 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:06 crc kubenswrapper[4765]: I1210 06:48:06.637902 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:06 crc kubenswrapper[4765]: I1210 06:48:06.637915 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:06 crc kubenswrapper[4765]: I1210 06:48:06.766548 4765 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.050801 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.051023 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.052556 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.052600 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.052614 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.310025 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.310254 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.311603 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.311641 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.311654 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.432317 
4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.638306 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.639336 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.639380 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:07 crc kubenswrapper[4765]: I1210 06:48:07.639392 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:09 crc kubenswrapper[4765]: I1210 06:48:09.367409 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 10 06:48:09 crc kubenswrapper[4765]: I1210 06:48:09.367576 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:09 crc kubenswrapper[4765]: I1210 06:48:09.368685 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:09 crc kubenswrapper[4765]: I1210 06:48:09.368714 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:09 crc kubenswrapper[4765]: I1210 06:48:09.368721 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:10 crc kubenswrapper[4765]: I1210 06:48:10.432753 4765 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 06:48:10 crc kubenswrapper[4765]: I1210 06:48:10.432896 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 06:48:10 crc kubenswrapper[4765]: E1210 06:48:10.648133 4765 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 10 06:48:12 crc kubenswrapper[4765]: I1210 06:48:12.854693 4765 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 10 06:48:12 crc kubenswrapper[4765]: I1210 06:48:12.854758 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 10 06:48:13 crc kubenswrapper[4765]: I1210 06:48:13.197285 4765 patch_prober.go:28] interesting pod/kube-apiserver-crc 
container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 10 06:48:13 crc kubenswrapper[4765]: I1210 06:48:13.197337 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 10 06:48:13 crc kubenswrapper[4765]: I1210 06:48:13.201547 4765 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 10 06:48:13 crc kubenswrapper[4765]: I1210 06:48:13.201607 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 10 06:48:15 crc kubenswrapper[4765]: I1210 06:48:15.252194 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:48:15 crc kubenswrapper[4765]: I1210 06:48:15.252693 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:15 crc kubenswrapper[4765]: I1210 06:48:15.253871 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:15 crc kubenswrapper[4765]: I1210 06:48:15.253927 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:15 crc kubenswrapper[4765]: I1210 06:48:15.253941 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:16 crc kubenswrapper[4765]: I1210 06:48:16.021043 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Dec 10 06:48:16 crc kubenswrapper[4765]: I1210 06:48:16.021231 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:16 crc kubenswrapper[4765]: I1210 06:48:16.022318 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:16 crc kubenswrapper[4765]: I1210 06:48:16.022378 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:16 crc kubenswrapper[4765]: I1210 06:48:16.022393 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:16 crc kubenswrapper[4765]: I1210 06:48:16.031855 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Dec 10 06:48:16 crc kubenswrapper[4765]: I1210 06:48:16.659228 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:16 crc kubenswrapper[4765]: I1210 06:48:16.660508 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:16 crc kubenswrapper[4765]: I1210 06:48:16.660560 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:16 crc kubenswrapper[4765]: I1210 06:48:16.660582 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.645614 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.645811 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.646756 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.646797 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.646814 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.650156 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.661853 4765 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.661908 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.662824 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.662857 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:17 crc kubenswrapper[4765]: I1210 06:48:17.662869 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.180351 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.181639 4765 trace.go:236] Trace[789126092]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 06:48:04.843) (total time: 13338ms):
Dec 10 06:48:18 crc kubenswrapper[4765]: Trace[789126092]: ---"Objects listed" error: 13338ms (06:48:18.181)
Dec 10 06:48:18 crc kubenswrapper[4765]: Trace[789126092]: [13.338100632s] [13.338100632s] END
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.181667 4765 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.183161 4765 trace.go:236] Trace[349777033]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 06:48:03.989) (total time: 14193ms):
Dec 10 06:48:18 crc kubenswrapper[4765]: Trace[349777033]: ---"Objects listed" error: 14193ms (06:48:18.182)
Dec 10 06:48:18 crc kubenswrapper[4765]: Trace[349777033]: [14.193943109s] [14.193943109s] END
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.183185 4765 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.183189 4765 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.183330 4765 trace.go:236] Trace[551734860]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 06:48:03.479) (total time: 14703ms):
Dec 10 06:48:18 crc kubenswrapper[4765]: Trace[551734860]: ---"Objects listed" error: 14703ms (06:48:18.183)
Dec 10 06:48:18 crc kubenswrapper[4765]: Trace[551734860]: [14.703441997s] [14.703441997s] END
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.183342 4765 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.188212 4765 trace.go:236] Trace[1219643227]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 06:48:04.124) (total time: 14063ms):
Dec 10 06:48:18 crc kubenswrapper[4765]: Trace[1219643227]: ---"Objects listed" error: 14063ms (06:48:18.188)
Dec 10 06:48:18 crc kubenswrapper[4765]: Trace[1219643227]: [14.063285252s] [14.063285252s] END
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.188233 4765 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.188949 4765 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.207230 4765 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.225782 4765 csr.go:261] certificate signing request csr-6kch4 is approved, waiting to be issued
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.227658 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.234271 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.235637 4765 csr.go:257] certificate signing request csr-6kch4 is issued
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.396150 4765 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:45106->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.396211 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:45106->192.168.126.11:17697: read: connection reset by peer"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.396487 4765 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.396522 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.495930 4765 apiserver.go:52] "Watching apiserver"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.500031 4765 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.500834 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"]
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.501281 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.501480 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.501607 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.502017 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.502557 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.502648 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.502650 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.502649 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.502891 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.504226 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.504227 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.505221 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.505337 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.505583 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.505649 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.506322 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.507169 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.508002 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.520800 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.534036 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.545458 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cer
t-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.557693 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.568865 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.578697 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.588131 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.598854 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.599501 4765 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.612205 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.666263 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.667989 4765 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a" exitCode=255 Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.668066 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a"} Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.677747 4765 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.680857 4765 scope.go:117] "RemoveContainer" containerID="3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.682295 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686281 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686642 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686679 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686702 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686722 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686741 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686765 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686783 4765 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686800 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686819 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686840 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686860 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686886 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686907 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686930 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.686975 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687007 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687031 4765 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687058 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687099 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687124 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687147 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687170 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687192 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687215 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687237 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687260 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687283 4765 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687306 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687336 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687355 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687375 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687394 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687414 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687435 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687457 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687475 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod 
\"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687494 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687514 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687535 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687570 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687648 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687675 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687701 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687722 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687742 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 
06:48:18.687763 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687754 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687759 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687772 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687783 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687874 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687907 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687931 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687912 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687953 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687976 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687985 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.687997 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688023 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688057 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688104 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688129 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688152 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688176 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688198 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688221 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688244 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688264 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688291 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688316 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688348 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688371 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688393 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688417 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688462 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688486 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688510 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688532 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688555 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688578 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688605 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688631 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688653 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688680 4765 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688708 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688733 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688758 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688782 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688806 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688830 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688853 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688877 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688900 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688923 4765 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688945 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688967 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688991 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689016 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689039 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689062 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689104 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689129 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689154 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689178 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689202 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689224 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689248 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689270 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689292 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689315 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689338 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689358 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689383 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" 
(UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689406 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689459 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689486 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689512 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689534 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689558 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689583 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689609 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689632 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689656 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod 
\"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689680 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689705 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689729 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689756 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689778 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689800 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689825 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689850 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689874 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689900 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689927 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689951 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689976 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689997 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690021 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690048 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690077 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690772 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690798 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690824 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" 
(UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690851 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690876 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690898 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690920 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690949 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691015 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691045 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691069 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691108 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691131 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691156 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691183 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691205 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691229 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691252 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691277 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691300 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691324 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691345 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691367 4765 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691395 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691423 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691459 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691484 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691510 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691536 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691560 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691581 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691604 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691628 4765 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691653 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691679 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691706 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691735 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691758 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691783 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691808 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691830 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691851 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 
06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691926 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.691972 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.692000 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.692025 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.692050 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.692074 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.692118 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.700478 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.700541 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.700572 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.700616 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.702011 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688152 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.712673 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688307 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688495 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688622 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688700 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688711 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688808 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688818 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.688991 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689012 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689136 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689156 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689194 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689298 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689369 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689393 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689426 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689556 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689766 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689823 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689826 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.689998 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690304 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690450 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690474 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690899 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.690922 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.695155 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.695485 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.695589 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.695730 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.695973 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696062 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696215 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696244 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696273 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696335 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696504 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696561 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696520 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696704 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696718 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696759 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.696834 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.700374 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.700994 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.700881 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.701111 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.701184 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.701268 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:48:19.201192655 +0000 UTC m=+18.927867921 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.701309 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.702944 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.703378 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.703863 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.703872 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.703889 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.704734 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.705179 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.705203 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.705461 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.705505 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.705810 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.705909 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.705935 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.706274 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.706599 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.706608 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.706775 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.706894 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.707188 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.707243 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.707298 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.708031 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.708030 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.708383 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.710738 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.710921 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.710998 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.711040 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.711484 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.711484 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.711498 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.711676 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.711767 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.712430 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.712455 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.712484 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.712513 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.712640 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713415 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713463 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713491 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713518 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713542 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713560 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713586 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713611 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713635 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713678 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713746 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713785 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713818 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713845 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713873 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713900 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713947 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713973 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713998 4765 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.715877 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"r
eady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.715977 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716228 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716281 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716317 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716347 4765 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716444 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716470 4765 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716489 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716504 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716518 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716530 4765 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716544 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716557 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716570 4765 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716583 4765 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716726 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.717167 4765 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.717468 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713746 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.713867 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.714044 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.714126 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.714286 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.714452 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.714691 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.714877 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.715080 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.715181 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.715201 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.715487 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.715591 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.715865 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716072 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716199 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.716258 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.717079 4765 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.717447 4765 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.717824 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.717847 4765 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.717869 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.717904 4765 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.717929 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.717946 4765 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.717969 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.717986 4765 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.718001 4765 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.718017 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.718033 4765 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.718050 4765 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.718065 4765 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.718218 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.718280 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.718905 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:19.218879852 +0000 UTC m=+18.945545168 (durationBeforeRetry 500ms).
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.719032 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:19.219019166 +0000 UTC m=+18.945684542 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.718940 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719072 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719122 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719137 4765 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719150 4765 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719163 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719176 4765 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719189 4765 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719202 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" 
DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719214 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719226 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719240 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719256 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719270 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719287 4765 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719299 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719312 4765 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719324 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719335 4765 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719347 4765 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719359 4765 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719371 4765 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719386 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719399 4765 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719383 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719411 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719439 4765 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719452 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719466 4765 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719862 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719888 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719903 4765 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719916 4765 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719930 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 
06:48:18.719944 4765 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719955 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719967 4765 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719980 4765 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719903 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.719992 4765 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720031 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720046 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720059 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720073 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720106 4765 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720122 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720135 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" 
(UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720150 4765 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720164 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720177 4765 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720196 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720208 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720257 4765 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720270 4765 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720283 4765 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.720947 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.722813 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.723401 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). 
InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.723924 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.724110 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.728678 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.728820 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.728846 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.728861 4765 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.728920 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:19.228900655 +0000 UTC m=+18.955566051 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.729507 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.730329 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.730499 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.730515 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.730576 4765 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:18 crc kubenswrapper[4765]: E1210 06:48:18.730620 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:19.230599585 +0000 UTC m=+18.957264971 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.731004 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.731312 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.731346 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.731455 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.733586 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.733592 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.733945 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.733992 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.735124 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.735072 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.735647 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.735761 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.736711 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.736731 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.736851 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.736956 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.737430 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.737765 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.739144 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.739294 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.739613 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.739606 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.739684 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.739802 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.740060 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.740207 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.740283 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.740606 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.740726 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.740784 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.740841 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.741735 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.742776 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.742930 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.743015 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.743434 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.744358 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.744910 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.745313 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.745309 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.745333 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.745878 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.745986 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.746150 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.745067 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.746175 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.746273 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.746374 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.746634 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.746742 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.747065 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.749340 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.749936 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.750906 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.751227 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.751574 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.769593 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.776555 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.784031 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.784867 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.784948 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.785126 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.785249 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.785694 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.793405 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.793579 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.793684 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.794163 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.794654 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.794848 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.796314 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.796360 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.796776 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.799035 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.801388 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.803909 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.805996 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-t8knp"] Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.806345 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-t8knp" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.810502 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.810717 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.810860 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.816893 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820455 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820718 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820753 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820798 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820862 4765 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820878 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820888 4765 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820898 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820909 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820941 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820951 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath 
\"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820958 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820966 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820978 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820987 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820996 4765 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.820999 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821010 4765 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821013 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821020 4765 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821126 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821141 4765 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821151 4765 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc 
kubenswrapper[4765]: I1210 06:48:18.821166 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821176 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821185 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821199 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821222 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821254 4765 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821263 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821276 4765 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821284 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821293 4765 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821304 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821314 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821323 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 
06:48:18.821352 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821360 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821371 4765 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821380 4765 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821388 4765 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821413 4765 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821424 4765 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821432 4765 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821440 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821451 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821472 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821481 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821489 4765 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 
06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821509 4765 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821518 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821527 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821535 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821545 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821591 4765 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821600 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821608 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821620 4765 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821629 4765 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821638 4765 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821698 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821714 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc 
kubenswrapper[4765]: I1210 06:48:18.821761 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821771 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821781 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821790 4765 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821802 4765 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821810 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821828 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821836 4765 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821846 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821854 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821877 4765 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821890 4765 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821922 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821930 4765 
reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821939 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821950 4765 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821970 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.821995 4765 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822003 4765 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822014 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822022 4765 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822030 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822039 4765 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822049 4765 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822058 4765 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822067 4765 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822075 4765 
reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822101 4765 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822113 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822121 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822132 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822172 4765 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822374 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822384 4765 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822405 4765 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822429 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822516 4765 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822528 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822540 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822548 4765 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822556 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822659 4765 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822671 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822680 4765 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822690 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822703 4765 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822712 4765 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822731 4765 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822796 4765 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822812 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822822 4765 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822830 4765 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822953 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822964 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.822986 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.823006 4765 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.826790 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.826827 4765 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.826841 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.826853 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.826862 4765 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.826919 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.827072 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.835288 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: W1210 06:48:18.850643 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-d6305443a75b6bce233d45c8927b49643add0e6c2de3461d4daadb882ad88948 WatchSource:0}: Error finding container d6305443a75b6bce233d45c8927b49643add0e6c2de3461d4daadb882ad88948: Status 404 returned error can't find the container with id d6305443a75b6bce233d45c8927b49643add0e6c2de3461d4daadb882ad88948 Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.850686 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.863410 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.875035 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10
T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.890163 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.901462 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.915440 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.925033 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.927280 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc-hosts-file\") pod \"node-resolver-t8knp\" (UID: \"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\") " pod="openshift-dns/node-resolver-t8knp" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.927328 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfzct\" (UniqueName: \"kubernetes.io/projected/4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc-kube-api-access-jfzct\") pod \"node-resolver-t8knp\" (UID: \"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\") " pod="openshift-dns/node-resolver-t8knp" Dec 10 06:48:18 crc kubenswrapper[4765]: I1210 06:48:18.927358 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.028549 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc-hosts-file\") pod \"node-resolver-t8knp\" (UID: \"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\") " pod="openshift-dns/node-resolver-t8knp" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.028595 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfzct\" (UniqueName: \"kubernetes.io/projected/4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc-kube-api-access-jfzct\") pod \"node-resolver-t8knp\" (UID: \"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\") " pod="openshift-dns/node-resolver-t8knp" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.028737 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc-hosts-file\") pod \"node-resolver-t8knp\" (UID: \"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\") " pod="openshift-dns/node-resolver-t8knp" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.058971 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfzct\" (UniqueName: \"kubernetes.io/projected/4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc-kube-api-access-jfzct\") pod \"node-resolver-t8knp\" (UID: \"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\") " 
pod="openshift-dns/node-resolver-t8knp" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.113330 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.122425 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-t8knp" Dec 10 06:48:19 crc kubenswrapper[4765]: W1210 06:48:19.124421 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-f94205d6b0332cadd94e7345c2f9c931d78b206155015a10abb8dc0162837999 WatchSource:0}: Error finding container f94205d6b0332cadd94e7345c2f9c931d78b206155015a10abb8dc0162837999: Status 404 returned error can't find the container with id f94205d6b0332cadd94e7345c2f9c931d78b206155015a10abb8dc0162837999 Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.229815 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.229913 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.229995 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:48:20.229963182 +0000 UTC m=+19.956628498 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.230053 4765 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.230067 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.230108 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.230125 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:20.230107966 +0000 UTC m=+19.956773352 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.230226 4765 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.230296 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.230312 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.230340 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:20.230317902 +0000 UTC m=+19.956983218 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.230347 4765 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.230382 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:20.230372834 +0000 UTC m=+19.957038140 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.237461 4765 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-10 06:43:18 +0000 UTC, rotation deadline is 2026-11-01 06:40:28.350459074 +0000 UTC Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.237520 4765 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7823h52m9.112940821s for next certificate rotation Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.330626 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.330779 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.330795 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.330805 4765 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.330856 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-10 06:48:20.330841561 +0000 UTC m=+20.057506877 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.588477 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:19 crc kubenswrapper[4765]: E1210 06:48:19.588613 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.671985 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-t8knp" event={"ID":"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc","Type":"ContainerStarted","Data":"1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f"} Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.672033 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-t8knp" event={"ID":"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc","Type":"ContainerStarted","Data":"36f4588c92d97f453f4373a581d302f85e69b231a0d885d5679445fc3bfe3840"} Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.673441 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d6305443a75b6bce233d45c8927b49643add0e6c2de3461d4daadb882ad88948"} Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.675277 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687"} Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.675333 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9"} Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.675357 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5f76f78bab757ef655b47e625885d13fb0eccd4dee7650a76cc5df257f7c06b2"} Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.677418 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.678963 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78"} Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.679246 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.680293 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6"} Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.680339 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f94205d6b0332cadd94e7345c2f9c931d78b206155015a10abb8dc0162837999"} Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.696924 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10
T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.723656 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.739671 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.755931 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.768846 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.778839 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.791202 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.802239 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.812496 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.837408 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.875419 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.885560 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.898029 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.909753 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.921857 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.934371 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.946378 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:19 crc kubenswrapper[4765]: I1210 06:48:19.955343 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:19Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.238700 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.238792 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.238820 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.238871 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:48:22.238852494 +0000 UTC m=+21.965517830 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.238911 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.238928 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.238945 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.238957 4765 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.238980 4765 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.238997 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:22.238984688 +0000 UTC m=+21.965650004 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.239012 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:22.239003889 +0000 UTC m=+21.965669205 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.239043 4765 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.239067 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:22.23905961 +0000 UTC m=+21.965724926 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.340360 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.340478 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.340665 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.340684 4765 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.340802 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:22.340732403 +0000 UTC m=+22.067397729 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.367189 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-dcsjq"] Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.367566 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.369068 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.369277 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.369365 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.370778 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.383510 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.395770 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.406171 4765 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.416750 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.423417 4765 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 10 06:48:20 crc kubenswrapper[4765]: W1210 06:48:20.423854 4765 reflector.go:484] object-"openshift-image-registry"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 10 06:48:20 crc kubenswrapper[4765]: W1210 06:48:20.423947 4765 reflector.go:484] object-"openshift-image-registry"/"node-ca-dockercfg-4777p": watch of *v1.Secret ended with: very short watch: object-"openshift-image-registry"/"node-ca-dockercfg-4777p": Unexpected watch close - watch lasted less than a second and no items received Dec 10 06:48:20 crc kubenswrapper[4765]: W1210 06:48:20.424057 4765 reflector.go:484] object-"openshift-image-registry"/"image-registry-certificates": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"image-registry-certificates": Unexpected watch close - watch lasted less than a second and no items received Dec 10 06:48:20 crc kubenswrapper[4765]: W1210 06:48:20.424461 4765 reflector.go:484] object-"openshift-image-registry"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short 
watch: object-"openshift-image-registry"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.424573 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-diagnostics/pods/network-check-target-xd92c/status\": read tcp 38.102.83.41:49824->38.102.83.41:6443: use of closed network connection" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.441331 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c565f723-8bce-482f-a2c7-19581496ee74-serviceca\") pod \"node-ca-dcsjq\" (UID: \"c565f723-8bce-482f-a2c7-19581496ee74\") " pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.441410 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rvhm\" (UniqueName: \"kubernetes.io/projected/c565f723-8bce-482f-a2c7-19581496ee74-kube-api-access-4rvhm\") pod \"node-ca-dcsjq\" (UID: \"c565f723-8bce-482f-a2c7-19581496ee74\") " pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.441499 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c565f723-8bce-482f-a2c7-19581496ee74-host\") pod \"node-ca-dcsjq\" (UID: 
\"c565f723-8bce-482f-a2c7-19581496ee74\") " pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.445124 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.457107 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.474661 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.489805 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.500147 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.542575 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rvhm\" (UniqueName: \"kubernetes.io/projected/c565f723-8bce-482f-a2c7-19581496ee74-kube-api-access-4rvhm\") pod \"node-ca-dcsjq\" (UID: \"c565f723-8bce-482f-a2c7-19581496ee74\") " pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.542818 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c565f723-8bce-482f-a2c7-19581496ee74-host\") pod \"node-ca-dcsjq\" (UID: \"c565f723-8bce-482f-a2c7-19581496ee74\") " pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.542909 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c565f723-8bce-482f-a2c7-19581496ee74-serviceca\") pod \"node-ca-dcsjq\" (UID: \"c565f723-8bce-482f-a2c7-19581496ee74\") " pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.542963 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c565f723-8bce-482f-a2c7-19581496ee74-host\") pod \"node-ca-dcsjq\" (UID: \"c565f723-8bce-482f-a2c7-19581496ee74\") " pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.543977 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c565f723-8bce-482f-a2c7-19581496ee74-serviceca\") pod \"node-ca-dcsjq\" (UID: \"c565f723-8bce-482f-a2c7-19581496ee74\") " pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.568671 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rvhm\" (UniqueName: \"kubernetes.io/projected/c565f723-8bce-482f-a2c7-19581496ee74-kube-api-access-4rvhm\") pod 
\"node-ca-dcsjq\" (UID: \"c565f723-8bce-482f-a2c7-19581496ee74\") " pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.588470 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.588604 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.588790 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:20 crc kubenswrapper[4765]: E1210 06:48:20.588960 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.595225 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.596136 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.597485 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.598312 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.599523 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.600169 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.600839 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.601946 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.602636 4765 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.603567 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.603730 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.604386 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.605702 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.606291 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: 
I1210 06:48:20.607015 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.608973 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.609720 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.610799 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.611301 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.611948 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.613304 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.613993 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.614683 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.615285 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.615805 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" 
path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.617042 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.617583 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.618347 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.619982 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.620497 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.621108 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.621669 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.622151 4765 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.622300 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.623809 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.624373 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.624825 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.626285 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.627109 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.627658 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.628456 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.629106 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.630114 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.630681 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.631365 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.632251 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.633122 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.633638 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.634227 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.634726 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.635444 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.635989 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.636486 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.636965 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.637500 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.638152 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.638604 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.641839 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.665241 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.680234 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-dcsjq" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.686661 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: W1210 06:48:20.693702 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc565f723_8bce_482f_a2c7_19581496ee74.slice/crio-2b017329db2dbf551269322d2cdf970ca5dad233799947be327355fb813a5e76 WatchSource:0}: Error finding container 2b017329db2dbf551269322d2cdf970ca5dad233799947be327355fb813a5e76: Status 404 returned error can't find the container with id 2b017329db2dbf551269322d2cdf970ca5dad233799947be327355fb813a5e76 Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.710227 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.724541 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.742167 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:20 crc kubenswrapper[4765]: I1210 06:48:20.755953 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-12-10T06:48:20Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.192939 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-r78vd"]
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.193604 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-xlv8w"]
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.193745 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.193833 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.195780 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-lcr6j"]
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.196472 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-lcr6j"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.196869 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.197605 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.198280 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.200051 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.201378 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.201610 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.201681 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.201697 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5wj7r"]
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.201701 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.201760 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 10 06:48:21 crc kubenswrapper[4765]: W1210 06:48:21.201769 4765 reflector.go:561] object-"openshift-multus"/"default-cni-sysctl-allowlist": failed to list *v1.ConfigMap: configmaps "default-cni-sysctl-allowlist" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object
Dec 10 06:48:21 crc kubenswrapper[4765]: E1210 06:48:21.201814 4765 reflector.go:158] "Unhandled Error"
err="object-\"openshift-multus\"/\"default-cni-sysctl-allowlist\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"default-cni-sysctl-allowlist\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 10 06:48:21 crc kubenswrapper[4765]: W1210 06:48:21.201767 4765 reflector.go:561] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": failed to list *v1.Secret: secrets "multus-ancillary-tools-dockercfg-vnmsz" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 10 06:48:21 crc kubenswrapper[4765]: E1210 06:48:21.201851 4765 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-ancillary-tools-dockercfg-vnmsz\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"multus-ancillary-tools-dockercfg-vnmsz\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.202202 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.204763 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.208550 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.208832 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.208941 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.209020 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.209128 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.208941 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.209354 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.217739 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.230360 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.243830 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249506 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-multus-cni-dir\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249551 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-multus-socket-dir-parent\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249581 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/51cca3f8-b6e3-4c05-a289-32192e52215a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc 
kubenswrapper[4765]: I1210 06:48:21.249605 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-ovn-kubernetes\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249672 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-cnibin\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249725 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-var-lib-kubelet\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249802 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-multus-conf-dir\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249826 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-kubelet\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249844 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-systemd-units\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249863 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-log-socket\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249887 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d5227381-9852-49ce-96f1-220c42aab12a-multus-daemon-config\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249905 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-openvswitch\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") "
pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249927 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-var-lib-openvswitch\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249954 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-script-lib\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249974 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-slash\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.249996 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-systemd\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250021 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-var-lib-cni-bin\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250049 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc9wc\" (UniqueName: \"kubernetes.io/projected/d5227381-9852-49ce-96f1-220c42aab12a-kube-api-access-nc9wc\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250101 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovn-node-metrics-cert\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250165 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d0714d9a-e9d5-4aca-8341-a073849f9234-rootfs\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250188 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/51cca3f8-b6e3-4c05-a289-32192e52215a-cni-binary-copy\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250211 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-ovn\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250233 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-node-log\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250262 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-system-cni-dir\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250279 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-run-k8s-cni-cncf-io\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250303 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-var-lib-cni-multus\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250323 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-netns\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250343 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-env-overrides\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250368 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-etc-openvswitch\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250390 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d0714d9a-e9d5-4aca-8341-a073849f9234-mcd-auth-proxy-config\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250420 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-run-netns\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250436 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-run-multus-certs\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250453 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-netd\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250475 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250509 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-etc-kubernetes\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250540 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d0714d9a-e9d5-4aca-8341-a073849f9234-proxy-tls\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250556 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-system-cni-dir\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250600 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250628 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-config\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250649 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8w29\" (UniqueName: \"kubernetes.io/projected/51cca3f8-b6e3-4c05-a289-32192e52215a-kube-api-access-z8w29\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250670 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-cnibin\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250689 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-hostroot\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250708 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-bin\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250731 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsdwg\" (UniqueName: \"kubernetes.io/projected/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-kube-api-access-lsdwg\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250760 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wt5g\" (UniqueName: \"kubernetes.io/projected/d0714d9a-e9d5-4aca-8341-a073849f9234-kube-api-access-4wt5g\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250784 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-os-release\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250801 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d5227381-9852-49ce-96f1-220c42aab12a-cni-binary-copy\") pod 
\"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.250816 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-os-release\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.254533 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.268025 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.279151 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.279151 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.291911 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.303507 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.315549 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.325018 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.327699 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.340034 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z"
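The volume records that dominate the rest of this window come in matched pairs: reconciler_common.go:218 emits "operationExecutor.MountVolume started" and, once the mount completes, operation_generator.go:637 emits "MountVolume.SetUp succeeded" for the same volume and pod UID. A small illustrative scanner over this one-record-per-line format (an ad-hoc sketch, not an official tool; for brevity it keys on volume name only, ignoring the pod UID) that reports mounts that never completed:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	// The volume name appears as \"name\" (escaped quotes) inside the klog message.
	started := regexp.MustCompile(`"operationExecutor\.MountVolume started for volume \\"([^\\]+)\\"`)
	succeeded := regexp.MustCompile(`"MountVolume\.SetUp succeeded for volume \\"([^\\]+)\\"`)
	pending := map[string]bool{}
	sc := bufio.NewScanner(os.Stdin)
	// Status-patch records can exceed bufio.Scanner's default 64 KiB token limit.
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		line := sc.Text()
		if m := started.FindStringSubmatch(line); m != nil {
			pending[m[1]] = true
		}
		if m := succeeded.FindStringSubmatch(line); m != nil {
			delete(pending, m[1])
		}
	}
	for vol := range pending {
		fmt.Println("MountVolume started but no SetUp succeeded record:", vol)
	}
}

Run as, for example: go run scan.go < kubelet.log. In the records below, every started/succeeded pair does close, so a run over this window should report nothing.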
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.351839 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352019 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d5227381-9852-49ce-96f1-220c42aab12a-multus-daemon-config\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352376 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-openvswitch\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352419 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-var-lib-openvswitch\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352448 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-script-lib\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352473 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-slash\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352473 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-openvswitch\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352491 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-var-lib-openvswitch\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352494 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-systemd\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352526 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-systemd\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352537 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-slash\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352573 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-var-lib-cni-bin\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352599 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc9wc\" (UniqueName: \"kubernetes.io/projected/d5227381-9852-49ce-96f1-220c42aab12a-kube-api-access-nc9wc\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352704 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-var-lib-cni-bin\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352716 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovn-node-metrics-cert\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352748 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d0714d9a-e9d5-4aca-8341-a073849f9234-rootfs\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352768 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/51cca3f8-b6e3-4c05-a289-32192e52215a-cni-binary-copy\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352812 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d0714d9a-e9d5-4aca-8341-a073849f9234-rootfs\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352793 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-netns\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352895 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-ovn\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352920 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-node-log\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352930 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-netns\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352948 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-system-cni-dir\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352967 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-run-k8s-cni-cncf-io\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352973 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-node-log\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352981 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-var-lib-cni-multus\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352974 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-ovn\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353004 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-run-k8s-cni-cncf-io\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.352997 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-env-overrides\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353036 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-var-lib-cni-multus\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353041 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-system-cni-dir\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353100 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-etc-openvswitch\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353142 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d0714d9a-e9d5-4aca-8341-a073849f9234-mcd-auth-proxy-config\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353170 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j"
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353207 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-run-netns\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd"
(UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353230 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-run-multus-certs\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353283 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d5227381-9852-49ce-96f1-220c42aab12a-multus-daemon-config\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353293 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-netd\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353313 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-etc-kubernetes\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353331 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d0714d9a-e9d5-4aca-8341-a073849f9234-proxy-tls\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353335 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-run-netns\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353349 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-system-cni-dir\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353364 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-netd\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353369 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-config\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: 
I1210 06:48:21.353392 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353412 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wt5g\" (UniqueName: \"kubernetes.io/projected/d0714d9a-e9d5-4aca-8341-a073849f9234-kube-api-access-4wt5g\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353427 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8w29\" (UniqueName: \"kubernetes.io/projected/51cca3f8-b6e3-4c05-a289-32192e52215a-kube-api-access-z8w29\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353445 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-cnibin\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353460 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-hostroot\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353474 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-bin\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353487 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsdwg\" (UniqueName: \"kubernetes.io/projected/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-kube-api-access-lsdwg\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353505 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-os-release\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353519 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d5227381-9852-49ce-96f1-220c42aab12a-cni-binary-copy\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353533 4765 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-os-release\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353530 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-env-overrides\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353549 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-multus-cni-dir\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353590 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-multus-socket-dir-parent\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353606 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-script-lib\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353613 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/51cca3f8-b6e3-4c05-a289-32192e52215a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353668 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-log-socket\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353699 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-bin\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353696 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-ovn-kubernetes\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353721 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-multus-cni-dir\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353728 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-ovn-kubernetes\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353730 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-system-cni-dir\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353731 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-cnibin\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353758 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/51cca3f8-b6e3-4c05-a289-32192e52215a-cni-binary-copy\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353771 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-var-lib-kubelet\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353674 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-multus-socket-dir-parent\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353800 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-multus-conf-dir\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353826 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-kubelet\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353841 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-log-socket\") pod \"ovnkube-node-5wj7r\" (UID: 
\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353848 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-systemd-units\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353875 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-systemd-units\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353747 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-cnibin\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353262 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-run-multus-certs\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353908 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d0714d9a-e9d5-4aca-8341-a073849f9234-mcd-auth-proxy-config\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353918 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-host-var-lib-kubelet\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353943 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-kubelet\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353949 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-cnibin\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353991 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-hostroot\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.353989 4765 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.354035 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-etc-kubernetes\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.354241 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-multus-conf-dir\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.354245 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d5227381-9852-49ce-96f1-220c42aab12a-cni-binary-copy\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.354273 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-os-release\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.354345 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-config\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.354440 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/51cca3f8-b6e3-4c05-a289-32192e52215a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.354266 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d5227381-9852-49ce-96f1-220c42aab12a-os-release\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.358448 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovn-node-metrics-cert\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.358522 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d0714d9a-e9d5-4aca-8341-a073849f9234-proxy-tls\") pod 
\"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.365439 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.368882 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsdwg\" (UniqueName: \"kubernetes.io/projected/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-kube-api-access-lsdwg\") pod \"ovnkube-node-5wj7r\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.369561 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wt5g\" (UniqueName: \"kubernetes.io/projected/d0714d9a-e9d5-4aca-8341-a073849f9234-kube-api-access-4wt5g\") pod \"machine-config-daemon-xlv8w\" (UID: \"d0714d9a-e9d5-4aca-8341-a073849f9234\") " pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.371807 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8w29\" (UniqueName: \"kubernetes.io/projected/51cca3f8-b6e3-4c05-a289-32192e52215a-kube-api-access-z8w29\") pod 
\"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.372998 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc9wc\" (UniqueName: \"kubernetes.io/projected/d5227381-9852-49ce-96f1-220c42aab12a-kube-api-access-nc9wc\") pod \"multus-r78vd\" (UID: \"d5227381-9852-49ce-96f1-220c42aab12a\") " pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.376400 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.386544 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.389576 4765 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.391152 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.391219 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.391234 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.391382 4765 kubelet_node_status.go:76] "Attempting to register node" 
node="crc" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.397248 4765 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.397860 4765 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.398207 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"resta
rtCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.399294 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.399332 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.399342 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.399364 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.399375 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.411866 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: E1210 06:48:21.413304 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f
84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.415967 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.416004 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.416014 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.416039 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.416049 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.422302 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: E1210 06:48:21.427068 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.430598 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.430639 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.430648 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.430666 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.430677 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.433448 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"o
vnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: E1210 06:48:21.441069 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
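The pod-status and node-status failures above share one root cause: Go's crypto/x509 rejects the webhook's serving certificate because the node clock (2025-12-10T06:48:21Z) is past the certificate's NotAfter (2025-08-24T17:21:41Z). Below is a minimal Go sketch of that validity check. It assumes the serving certificate is readable as PEM at /etc/webhook-cert/tls.crt; the mount path comes from the webhook container's volumeMounts quoted above, while the tls.crt file name is a guess based on the usual kubernetes.io/tls secret layout.

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path: the log only shows the /etc/webhook-cert/ mount.
	pemBytes, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		log.Fatalf("read cert: %v", err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatalf("parse cert: %v", err)
	}
	now := time.Now()
	// The same validity-window comparison crypto/x509 applies during
	// verification; here now=2025-12-10T06:48:21Z is after
	// NotAfter=2025-08-24T17:21:41Z, hence the failure.
	if now.After(cert.NotAfter) || now.Before(cert.NotBefore) {
		fmt.Printf("certificate invalid: current time %s is outside [%s, %s]\n",
			now.UTC().Format(time.RFC3339),
			cert.NotBefore.UTC().Format(time.RFC3339),
			cert.NotAfter.UTC().Format(time.RFC3339))
		os.Exit(1)
	}
	fmt.Println("certificate is within its validity window")
}

Run against the mounted certificate, this would print the same pair of timestamps that appears in every "failed to verify certificate" line in this log.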
Dec 10 06:48:21 crc kubenswrapper[4765]: E1210 06:48:21.441069 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" for node \"crc\" [node-status patch body elided: verbatim duplicate of the payload quoted in the previous attempt above]: Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z"
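Each "failed to patch status" record is the kubelet's status manager sending a strategic-merge PATCH to the pod's status subresource, which the API server rejects because it cannot call the network-node-identity validating webhook. A minimal client-go sketch of the same call shape follows; the kubeconfig path and the toy patch body are illustrative stand-ins, not taken from the kubelet.

package main

import (
	"context"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Hypothetical kubeconfig location for the sketch.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		log.Fatal(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	// Strategic-merge patch against the "status" subresource, mirroring the
	// shape of the payloads quoted in the log (conditions + containerStatuses).
	patch := []byte(`{"status":{"conditions":[{"type":"Ready","status":"False"}]}}`)
	_, err = client.CoreV1().Pods("openshift-machine-config-operator").Patch(
		context.TODO(), "machine-config-daemon-xlv8w",
		types.StrategicMergePatchType, patch, metav1.PatchOptions{}, "status")
	if err != nil {
		// With the expired webhook cert, the apiserver answers with the
		// "Internal error occurred: failed calling webhook" error seen above.
		log.Fatalf("patch status: %v", err)
	}
}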
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.444313 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.444349 4765 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.444358 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.444373 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.444383 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.456230 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: E1210 06:48:21.456273 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.459269 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.459304 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.459314 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.459329 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.459339 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.468270 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: E1210 06:48:21.471221 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: E1210 06:48:21.471359 4765 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.472932 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.472971 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.472984 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.473002 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.473013 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.479714 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.490894 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.499782 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.513980 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.516968 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.521055 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-r78vd" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.538348 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:21 crc kubenswrapper[4765]: W1210 06:48:21.568433 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ba0a5a0_6a96_4829_b5c0_6fe8310ab2e3.slice/crio-45fe4e94f0afb580ee340cea8197c0bdaa468cfec55a6e2291201bd627e0d98e WatchSource:0}: Error finding container 45fe4e94f0afb580ee340cea8197c0bdaa468cfec55a6e2291201bd627e0d98e: Status 404 returned error can't find the container with id 45fe4e94f0afb580ee340cea8197c0bdaa468cfec55a6e2291201bd627e0d98e Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.579481 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.579560 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.579570 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.579589 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.579604 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.593576 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:21 crc kubenswrapper[4765]: E1210 06:48:21.593707 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.681689 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.681723 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.681733 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.681747 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.681756 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.691176 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.693141 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d" exitCode=0 Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.693215 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.693250 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"45fe4e94f0afb580ee340cea8197c0bdaa468cfec55a6e2291201bd627e0d98e"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.694507 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r78vd" event={"ID":"d5227381-9852-49ce-96f1-220c42aab12a","Type":"ContainerStarted","Data":"ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.694528 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r78vd" event={"ID":"d5227381-9852-49ce-96f1-220c42aab12a","Type":"ContainerStarted","Data":"e8a7109f5488613ec782928b0d55fb1bc6edba4ea8e8ad9da5bfd6ffe725fff6"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.696536 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.696565 4765 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"bab2eb6ef64f1beab17597654516c7a93389a6f124608010c1b5224466f394c9"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.699018 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-dcsjq" event={"ID":"c565f723-8bce-482f-a2c7-19581496ee74","Type":"ContainerStarted","Data":"a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.699048 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-dcsjq" event={"ID":"c565f723-8bce-482f-a2c7-19581496ee74","Type":"ContainerStarted","Data":"2b017329db2dbf551269322d2cdf970ca5dad233799947be327355fb813a5e76"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.708392 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.723040 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.732367 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.746604 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.760380 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.773975 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.783652 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" 
Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.783697 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.783713 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.783730 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.783739 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.788507 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.809207 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.824369 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.842408 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.857911 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.865185 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.879199 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.889279 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.889324 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.889338 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.889357 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.889373 4765 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.900033 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.915708 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.929942 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.941864 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.954547 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.965662 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.985741 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:21Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.989817 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.991437 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.991465 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.991475 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.991490 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:21 crc kubenswrapper[4765]: I1210 06:48:21.991499 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:21Z","lastTransitionTime":"2025-12-10T06:48:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.002390 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.017121 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.029274 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.047019 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.086800 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.093788 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.093835 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.093846 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.093863 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.093876 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:22Z","lastTransitionTime":"2025-12-10T06:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.128637 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.164222 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.196253 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.196287 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.196295 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.196311 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.196338 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:22Z","lastTransitionTime":"2025-12-10T06:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.207365 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.244995 4765 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.261747 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.261891 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.261942 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:48:26.261892863 +0000 UTC m=+25.988558179 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.261996 4765 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.262033 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.262051 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:26.262034797 +0000 UTC m=+25.988700193 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.262069 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.262204 4765 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.262272 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.262285 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:26.262268974 +0000 UTC m=+25.988934290 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.262292 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.262305 4765 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.262348 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:26.262338236 +0000 UTC m=+25.989003632 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.278398 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.298868 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.298901 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.298912 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.298929 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.298939 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:22Z","lastTransitionTime":"2025-12-10T06:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.311363 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.354617 4765 configmap.go:193] Couldn't get configMap openshift-multus/default-cni-sysctl-allowlist: failed to sync configmap cache: timed out waiting for the condition Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.354717 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/51cca3f8-b6e3-4c05-a289-32192e52215a-cni-sysctl-allowlist 
podName:51cca3f8-b6e3-4c05-a289-32192e52215a nodeName:}" failed. No retries permitted until 2025-12-10 06:48:22.854700545 +0000 UTC m=+22.581365861 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cni-sysctl-allowlist" (UniqueName: "kubernetes.io/configmap/51cca3f8-b6e3-4c05-a289-32192e52215a-cni-sysctl-allowlist") pod "multus-additional-cni-plugins-lcr6j" (UID: "51cca3f8-b6e3-4c05-a289-32192e52215a") : failed to sync configmap cache: timed out waiting for the condition Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.363035 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.363231 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.363246 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.363258 4765 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.363302 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:26.363285176 +0000 UTC m=+26.089950492 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.401711 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.402057 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.402065 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.402094 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.402103 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:22Z","lastTransitionTime":"2025-12-10T06:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.439925 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.504826 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.504866 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.504875 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.504892 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.504909 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:22Z","lastTransitionTime":"2025-12-10T06:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.588295 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.588304 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.588421 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:22 crc kubenswrapper[4765]: E1210 06:48:22.588475 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.610919 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.611038 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.611129 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.611209 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.611298 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:22Z","lastTransitionTime":"2025-12-10T06:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.705491 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.705560 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.705576 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.705587 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.705598 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.705608 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.707184 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.713862 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.713894 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.713902 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.713916 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.713926 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:22Z","lastTransitionTime":"2025-12-10T06:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.725117 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee
53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.738226 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.749745 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.760408 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.769783 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.780257 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.789841 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.801401 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.812544 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.816586 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.816632 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.816642 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.816661 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.816671 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:22Z","lastTransitionTime":"2025-12-10T06:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.823919 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.833945 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.845795 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.856033 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.870493 4765 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/51cca3f8-b6e3-4c05-a289-32192e52215a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.871022 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/51cca3f8-b6e3-4c05-a289-32192e52215a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-lcr6j\" (UID: \"51cca3f8-b6e3-4c05-a289-32192e52215a\") " pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.887151 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:22Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.918676 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.918921 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.918932 4765 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.918952 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:22 crc kubenswrapper[4765]: I1210 06:48:22.918962 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:22Z","lastTransitionTime":"2025-12-10T06:48:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.021465 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.021496 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.021505 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.021521 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.021529 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:23Z","lastTransitionTime":"2025-12-10T06:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.028852 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.126960 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.127308 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.127319 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.127336 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.127346 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:23Z","lastTransitionTime":"2025-12-10T06:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.230010 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.230045 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.230055 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.230073 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.230116 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:23Z","lastTransitionTime":"2025-12-10T06:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.331923 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.331963 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.331975 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.331993 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.332005 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:23Z","lastTransitionTime":"2025-12-10T06:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.434620 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.434668 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.434683 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.434702 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.434713 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:23Z","lastTransitionTime":"2025-12-10T06:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.537349 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.537391 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.537402 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.537419 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.537431 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:23Z","lastTransitionTime":"2025-12-10T06:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.588826 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:23 crc kubenswrapper[4765]: E1210 06:48:23.588973 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.640173 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.640214 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.640223 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.640237 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.640247 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:23Z","lastTransitionTime":"2025-12-10T06:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.711457 4765 generic.go:334] "Generic (PLEG): container finished" podID="51cca3f8-b6e3-4c05-a289-32192e52215a" containerID="1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3" exitCode=0 Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.711562 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" event={"ID":"51cca3f8-b6e3-4c05-a289-32192e52215a","Type":"ContainerDied","Data":"1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.711616 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" event={"ID":"51cca3f8-b6e3-4c05-a289-32192e52215a","Type":"ContainerStarted","Data":"f6a2abfee0fa3c0b7f4310db72d88ee75d2e91706f4265facd9471c8fda9a445"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.730147 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.742052 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.742103 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.742113 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.742128 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.742138 4765 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:23Z","lastTransitionTime":"2025-12-10T06:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.743903 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.757689 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.772565 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.810402 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.825258 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.841971 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.844448 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.845042 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.845075 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.845120 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.845131 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:23Z","lastTransitionTime":"2025-12-10T06:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.855516 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.866567 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.877002 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.885065 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.898113 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.908440 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 
06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.924617 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\
\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.948587 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.948641 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.948652 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.948670 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:23 crc kubenswrapper[4765]: I1210 06:48:23.948682 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:23Z","lastTransitionTime":"2025-12-10T06:48:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.053493 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.053545 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.053560 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.053578 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.053590 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:24Z","lastTransitionTime":"2025-12-10T06:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.156529 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.156584 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.156594 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.156610 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.156620 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:24Z","lastTransitionTime":"2025-12-10T06:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.258796 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.258833 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.258841 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.258856 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.258864 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:24Z","lastTransitionTime":"2025-12-10T06:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.361157 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.361197 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.361205 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.361220 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.361228 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:24Z","lastTransitionTime":"2025-12-10T06:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.463437 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.463477 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.463485 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.463501 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.463512 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:24Z","lastTransitionTime":"2025-12-10T06:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.566072 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.566132 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.566143 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.566160 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.566171 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:24Z","lastTransitionTime":"2025-12-10T06:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.588816 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 06:48:24 crc kubenswrapper[4765]: E1210 06:48:24.588950 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.589004 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:48:24 crc kubenswrapper[4765]: E1210 06:48:24.589163 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.668599 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.668638 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.668646 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.668662 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.668673 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:24Z","lastTransitionTime":"2025-12-10T06:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.715553 4765 generic.go:334] "Generic (PLEG): container finished" podID="51cca3f8-b6e3-4c05-a289-32192e52215a" containerID="2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee" exitCode=0 Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.715625 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" event={"ID":"51cca3f8-b6e3-4c05-a289-32192e52215a","Type":"ContainerDied","Data":"2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee"} Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.729754 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered 
and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.743260 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.756443 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.768689 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.770692 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.770730 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.770738 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.770753 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.770764 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:24Z","lastTransitionTime":"2025-12-10T06:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.786070 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z 
is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.798205 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.809029 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.820528 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.834039 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.847826 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.858642 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.872516 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.873518 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.873546 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.873554 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.873569 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.873578 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:24Z","lastTransitionTime":"2025-12-10T06:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.883699 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea1
77225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.896723 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:24Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.976179 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.976230 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.976241 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.976257 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:24 crc kubenswrapper[4765]: I1210 06:48:24.976268 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:24Z","lastTransitionTime":"2025-12-10T06:48:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.078671 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.078723 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.078757 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.078777 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.078786 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:25Z","lastTransitionTime":"2025-12-10T06:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.180509 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.180542 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.180550 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.180577 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.180586 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:25Z","lastTransitionTime":"2025-12-10T06:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.282442 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.282491 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.282502 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.282517 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.282527 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:25Z","lastTransitionTime":"2025-12-10T06:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.384608 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.384640 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.384650 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.384667 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.384678 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:25Z","lastTransitionTime":"2025-12-10T06:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.489424 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.489465 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.489474 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.489490 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.489499 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:25Z","lastTransitionTime":"2025-12-10T06:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.588687 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:48:25 crc kubenswrapper[4765]: E1210 06:48:25.588822 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.591341 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.591386 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.591396 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.591412 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.591423 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:25Z","lastTransitionTime":"2025-12-10T06:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.693361 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.693395 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.693404 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.693419 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.693428 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:25Z","lastTransitionTime":"2025-12-10T06:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.721056 4765 generic.go:334] "Generic (PLEG): container finished" podID="51cca3f8-b6e3-4c05-a289-32192e52215a" containerID="34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6" exitCode=0
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.721182 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" event={"ID":"51cca3f8-b6e3-4c05-a289-32192e52215a","Type":"ContainerDied","Data":"34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.726261 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.744732 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"
name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/h
ost/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.759332 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.770712 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.782106 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.791429 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.795967 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.796010 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.796023 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.796041 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.796053 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:25Z","lastTransitionTime":"2025-12-10T06:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.810834 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z 
is after 2025-08-24T17:21:41Z"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.823587 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.833861 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.846054 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.856319 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.866987 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.875294 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.885871 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.896909 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:25Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.898882 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.898906 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.898915 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.898931 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:25 crc kubenswrapper[4765]: I1210 06:48:25.898940 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:25Z","lastTransitionTime":"2025-12-10T06:48:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.002038 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.002096 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.002112 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.002130 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.002141 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:26Z","lastTransitionTime":"2025-12-10T06:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.104002 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.104033 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.104043 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.104060 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.104070 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:26Z","lastTransitionTime":"2025-12-10T06:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.207508 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.207539 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.207548 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.207562 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.207573 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:26Z","lastTransitionTime":"2025-12-10T06:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.300469 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.300614 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.300680 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-10 06:48:34.300656145 +0000 UTC m=+34.027321461 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.300741 4765 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.300794 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.300890 4765 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.301220 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:34.300793909 +0000 UTC m=+34.027459295 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.301269 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.301374 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.301390 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.301401 4765 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.301444 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:34.301297144 +0000 UTC m=+34.027962540 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.301460 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:34.301455988 +0000 UTC m=+34.028121304 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.310431 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.310468 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.310482 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.310504 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.310517 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:26Z","lastTransitionTime":"2025-12-10T06:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.402703 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.402858 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.402876 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.402886 4765 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.402936 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:34.402923885 +0000 UTC m=+34.129589201 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.412822 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.412863 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.412872 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.412889 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.412897 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:26Z","lastTransitionTime":"2025-12-10T06:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.514781 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.514825 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.514835 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.514853 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.514864 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:26Z","lastTransitionTime":"2025-12-10T06:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.588574 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.588702 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.588586 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:26 crc kubenswrapper[4765]: E1210 06:48:26.588770 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.617471 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.617511 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.617523 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.617540 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.617551 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:26Z","lastTransitionTime":"2025-12-10T06:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.719912 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.719943 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.719952 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.719970 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.719984 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:26Z","lastTransitionTime":"2025-12-10T06:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.733477 4765 generic.go:334] "Generic (PLEG): container finished" podID="51cca3f8-b6e3-4c05-a289-32192e52215a" containerID="23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589" exitCode=0 Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.733527 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" event={"ID":"51cca3f8-b6e3-4c05-a289-32192e52215a","Type":"ContainerDied","Data":"23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.750665 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.771383 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.780913 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.793297 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.807078 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.823719 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.824710 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.824747 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.824757 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.824776 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.824790 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:26Z","lastTransitionTime":"2025-12-10T06:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.837256 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.849500 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.862731 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.872980 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.891672 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.903149 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.915381 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.927925 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.927977 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.927990 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.928011 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.928023 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:26Z","lastTransitionTime":"2025-12-10T06:48:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:26 crc kubenswrapper[4765]: I1210 06:48:26.928807 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:26Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.030223 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.030570 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.030645 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.030711 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.030775 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:27Z","lastTransitionTime":"2025-12-10T06:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.132863 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.132892 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.132901 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.132918 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.132929 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:27Z","lastTransitionTime":"2025-12-10T06:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.235143 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.235183 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.235196 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.235247 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.235255 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:27Z","lastTransitionTime":"2025-12-10T06:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.337256 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.337290 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.337300 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.337315 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.337324 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:27Z","lastTransitionTime":"2025-12-10T06:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.440263 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.440310 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.440320 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.440337 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.440346 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:27Z","lastTransitionTime":"2025-12-10T06:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.542746 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.542807 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.542826 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.542854 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.542874 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:27Z","lastTransitionTime":"2025-12-10T06:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.588202 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:27 crc kubenswrapper[4765]: E1210 06:48:27.588325 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.646231 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.646280 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.646292 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.646312 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.646328 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:27Z","lastTransitionTime":"2025-12-10T06:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.741261 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" event={"ID":"51cca3f8-b6e3-4c05-a289-32192e52215a","Type":"ContainerStarted","Data":"c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.746203 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.746439 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.746484 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.747762 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.747793 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.747800 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.747817 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.747836 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:27Z","lastTransitionTime":"2025-12-10T06:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.756791 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.769756 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.779048 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.780207 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.799329 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.814053 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.832951 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.847916 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.850902 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.850963 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.850974 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.850991 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.851002 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:27Z","lastTransitionTime":"2025-12-10T06:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.864034 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.882051 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.894787 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.907470 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.920746 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.932262 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 
06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.948444 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",
\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.953468 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.953512 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.953521 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.953539 4765 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.953550 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:27Z","lastTransitionTime":"2025-12-10T06:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.969040 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5285c04913af0c8f19436f9775bb92fa805ae01
7667d065546b08c58770af4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:27 crc kubenswrapper[4765]: I1210 06:48:27.985521 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.000213 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:27Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.012901 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.024277 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.037805 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.049963 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.055644 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.055684 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.055697 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.055718 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.055734 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:28Z","lastTransitionTime":"2025-12-10T06:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.062672 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.073688 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.086475 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.097802 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.111769 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.121552 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 
06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.134765 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",
\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.158380 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.158424 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.158433 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.158447 4765 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.158459 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:28Z","lastTransitionTime":"2025-12-10T06:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.260603 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.260638 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.260648 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.260663 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.260673 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:28Z","lastTransitionTime":"2025-12-10T06:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.362928 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.362966 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.362976 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.362989 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.362999 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:28Z","lastTransitionTime":"2025-12-10T06:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.465617 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.465685 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.465695 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.465709 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.465718 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:28Z","lastTransitionTime":"2025-12-10T06:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.568408 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.568442 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.568451 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.568464 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.568473 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:28Z","lastTransitionTime":"2025-12-10T06:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.588946 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.589001 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:28 crc kubenswrapper[4765]: E1210 06:48:28.589102 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:28 crc kubenswrapper[4765]: E1210 06:48:28.589139 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.671045 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.671113 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.671128 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.671146 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.671159 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:28Z","lastTransitionTime":"2025-12-10T06:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.752014 4765 generic.go:334] "Generic (PLEG): container finished" podID="51cca3f8-b6e3-4c05-a289-32192e52215a" containerID="c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551" exitCode=0 Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.752110 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" event={"ID":"51cca3f8-b6e3-4c05-a289-32192e52215a","Type":"ContainerDied","Data":"c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.752562 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.772992 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.773651 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.773677 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.773687 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.773704 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.773715 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:28Z","lastTransitionTime":"2025-12-10T06:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.788117 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.788907 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.800436 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.811784 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.823342 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.833405 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.846434 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.857317 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 
06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.871377 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveRe
adOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"20
25-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.878935 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.878960 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.878969 4765 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.878981 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.878991 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:28Z","lastTransitionTime":"2025-12-10T06:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.885187 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"st
artedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 
06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.898328 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.911761 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.922165 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.942218 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5285c04913af0c8f19436f9775bb92fa805ae01
7667d065546b08c58770af4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.984416 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.984455 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.984482 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.984498 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.984507 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:28Z","lastTransitionTime":"2025-12-10T06:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:28 crc kubenswrapper[4765]: I1210 06:48:28.987827 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:28Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.012696 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.026980 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.039370 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.048647 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.062688 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.075327 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.086760 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.086789 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.086817 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.086834 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.086845 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:29Z","lastTransitionTime":"2025-12-10T06:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.088996 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.103742 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.117513 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.133610 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.147920 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.160989 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 
06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.177339 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveRe
adOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"20
25-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.189437 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.189480 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.189494 4765 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.189515 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.189529 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:29Z","lastTransitionTime":"2025-12-10T06:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.291945 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.292018 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.292029 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.292049 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.292062 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:29Z","lastTransitionTime":"2025-12-10T06:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.395314 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.395383 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.395396 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.395418 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.395450 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:29Z","lastTransitionTime":"2025-12-10T06:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.500618 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.501030 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.501051 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.501069 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.501080 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:29Z","lastTransitionTime":"2025-12-10T06:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.588378 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:29 crc kubenswrapper[4765]: E1210 06:48:29.588537 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.603366 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.603412 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.603424 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.603442 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.603453 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:29Z","lastTransitionTime":"2025-12-10T06:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.705453 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.705510 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.705522 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.705539 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.705550 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:29Z","lastTransitionTime":"2025-12-10T06:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.757985 4765 generic.go:334] "Generic (PLEG): container finished" podID="51cca3f8-b6e3-4c05-a289-32192e52215a" containerID="00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e" exitCode=0 Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.758244 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" event={"ID":"51cca3f8-b6e3-4c05-a289-32192e52215a","Type":"ContainerDied","Data":"00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e"} Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.771240 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.788741 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.803934 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.807256 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.807286 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.807295 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.807310 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.807319 4765 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:29Z","lastTransitionTime":"2025-12-10T06:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.887653 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.901317 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.909156 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.909184 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.909192 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.909206 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.909215 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:29Z","lastTransitionTime":"2025-12-10T06:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.913507 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.925365 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.939173 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.955134 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.969431 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.980425 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:29 crc kubenswrapper[4765]: I1210 06:48:29.991711 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:29Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.004548 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 
06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.012163 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.012214 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.012257 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.012278 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.012294 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:30Z","lastTransitionTime":"2025-12-10T06:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.022655 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.114634 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.114681 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.114692 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.114713 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.114724 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:30Z","lastTransitionTime":"2025-12-10T06:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.216812 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.216855 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.216867 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.216887 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.216903 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:30Z","lastTransitionTime":"2025-12-10T06:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.318936 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.319007 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.319019 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.319040 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.319053 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:30Z","lastTransitionTime":"2025-12-10T06:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.422670 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.422725 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.422737 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.422757 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.422770 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:30Z","lastTransitionTime":"2025-12-10T06:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.524974 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.525015 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.525028 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.525047 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.525061 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:30Z","lastTransitionTime":"2025-12-10T06:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.588876 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 06:48:30 crc kubenswrapper[4765]: E1210 06:48:30.588993 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.589076 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:48:30 crc kubenswrapper[4765]: E1210 06:48:30.589264 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.607913 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mount
Path\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.620631 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.626850 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.626895 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.626905 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.626925 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.626936 4765 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:30Z","lastTransitionTime":"2025-12-10T06:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.633043 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.644552 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.656769 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.670875 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.683430 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.701355 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.713305 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.726414 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.728986 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.729028 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.729038 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.729055 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.729065 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:30Z","lastTransitionTime":"2025-12-10T06:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.737513 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.752424 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.764830 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.765112 4765 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" event={"ID":"51cca3f8-b6e3-4c05-a289-32192e52215a","Type":"ContainerStarted","Data":"5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714"} Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.767482 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/0.log" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.769948 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e" exitCode=1 Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.769988 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e"} Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.770627 4765 scope.go:117] "RemoveContainer" containerID="f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.779684 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.793580 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.806270 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.817712 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.829834 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.830938 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.830985 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.830997 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.831014 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.831025 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:30Z","lastTransitionTime":"2025-12-10T06:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.842139 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea1
77225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.857022 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"C
ompleted\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.871354 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.882650 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.894545 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.904761 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.923448 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"message\\\":\\\"1.Namespace event handler 5 for removal\\\\nI1210 06:48:30.398561 6051 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1210 06:48:30.398103 6051 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398584 6051 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 06:48:30.398596 6051 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 06:48:30.398142 6051 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398176 6051 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398566 6051 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1210 06:48:30.399250 6051 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1210 06:48:30.399282 6051 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 06:48:30.399306 6051 factory.go:656] Stopping watch factory\\\\nI1210 06:48:30.399316 6051 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 06:48:30.399326 6051 handler.go:208] Removed *v1.Node event handler 7\\\\nI1210 06:48:30.399352 6051 handler.go:208] Removed *v1.Node event handler 2\\\\nI1210 06:48:30.399332 6051 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.933978 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.934004 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.934012 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.934026 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.934039 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:30Z","lastTransitionTime":"2025-12-10T06:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.935586 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.947264 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:30 crc kubenswrapper[4765]: I1210 06:48:30.960427 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:30Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.036744 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.036802 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.036818 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.036836 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.036849 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.139609 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.139653 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.139662 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.139679 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.139725 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.242158 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.242199 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.242211 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.242231 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.242244 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.345636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.345675 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.345692 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.345708 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.345721 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.447525 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.447608 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.447619 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.447634 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.447643 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.550930 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.550989 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.551003 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.551024 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.551038 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.588590 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:31 crc kubenswrapper[4765]: E1210 06:48:31.588726 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.654379 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.654847 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.654862 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.654879 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.654891 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 10 06:48:31 crc kubenswrapper[4765]: E1210 06:48:31.669340 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.674731 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.674780 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.674792 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.674809 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.674820 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.692780 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.692829 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.692839 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.692857 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.692868 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.709187 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.709229 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.709245 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.709260 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.709272 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.724937 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.724976 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.724998 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.725014 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.725024 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:31 crc kubenswrapper[4765]: E1210 06:48:31.736399 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: E1210 06:48:31.736643 4765 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.738027 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.738059 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.738071 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.738101 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.738110 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.774326 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/0.log" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.777338 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a"} Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.777717 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.791554 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.801535 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.812537 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.822383 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.832254 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.840071 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.840116 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.840127 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.840141 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.840151 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.842063 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.853744 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.865079 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.879829 4765 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.890831 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.901395 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.913868 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.924511 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.942061 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"message\\\":\\\"1.Namespace event handler 5 for removal\\\\nI1210 06:48:30.398561 6051 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1210 06:48:30.398103 6051 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398584 6051 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 06:48:30.398596 6051 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 06:48:30.398142 6051 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398176 6051 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398566 6051 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1210 06:48:30.399250 6051 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1210 06:48:30.399282 6051 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 06:48:30.399306 6051 factory.go:656] Stopping watch factory\\\\nI1210 06:48:30.399316 6051 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 06:48:30.399326 6051 handler.go:208] Removed *v1.Node event handler 7\\\\nI1210 06:48:30.399352 6051 handler.go:208] Removed *v1.Node event handler 2\\\\nI1210 06:48:30.399332 6051 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.942894 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.942933 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.942945 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.942963 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:31 crc kubenswrapper[4765]: I1210 06:48:31.942975 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:31Z","lastTransitionTime":"2025-12-10T06:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.045364 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.045402 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.045410 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.045425 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.045435 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:32Z","lastTransitionTime":"2025-12-10T06:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.147524 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.147567 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.147578 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.147593 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.147605 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:32Z","lastTransitionTime":"2025-12-10T06:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.249466 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.249527 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.249540 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.249555 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.249566 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:32Z","lastTransitionTime":"2025-12-10T06:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.352147 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.352189 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.352204 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.352222 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.352231 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:32Z","lastTransitionTime":"2025-12-10T06:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.454917 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.456602 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.456619 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.456646 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.456676 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:32Z","lastTransitionTime":"2025-12-10T06:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.559821 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.559888 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.559904 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.559922 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.559936 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:32Z","lastTransitionTime":"2025-12-10T06:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.588228 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:32 crc kubenswrapper[4765]: E1210 06:48:32.588385 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.588507 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:32 crc kubenswrapper[4765]: E1210 06:48:32.588655 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.603748 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng"] Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.604190 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.609462 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.617004 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.619107 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.631568 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.642908 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.654838 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.662051 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.662298 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.662389 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.662501 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.662621 4765 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:32Z","lastTransitionTime":"2025-12-10T06:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.667149 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.677710 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.687026 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.698580 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.707875 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af4ada46-5cbb-4675-9e5a-4abf08bbea89-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.707936 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/af4ada46-5cbb-4675-9e5a-4abf08bbea89-env-overrides\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.707991 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af4ada46-5cbb-4675-9e5a-4abf08bbea89-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.708019 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr2vq\" (UniqueName: \"kubernetes.io/projected/af4ada46-5cbb-4675-9e5a-4abf08bbea89-kube-api-access-cr2vq\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.711073 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.724770 4765 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.736621 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.747192 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.757046 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.765195 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.765230 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.765243 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.765259 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.765269 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:32Z","lastTransitionTime":"2025-12-10T06:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.773868 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"message\\\":\\\"1.Namespace event handler 5 for removal\\\\nI1210 06:48:30.398561 6051 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1210 06:48:30.398103 6051 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398584 6051 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 06:48:30.398596 6051 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 06:48:30.398142 6051 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398176 6051 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398566 6051 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1210 06:48:30.399250 6051 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1210 06:48:30.399282 6051 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 06:48:30.399306 6051 factory.go:656] Stopping watch factory\\\\nI1210 06:48:30.399316 6051 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 06:48:30.399326 6051 handler.go:208] Removed *v1.Node event handler 7\\\\nI1210 06:48:30.399352 6051 handler.go:208] Removed *v1.Node event handler 2\\\\nI1210 06:48:30.399332 6051 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.780897 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/1.log" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.781272 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/0.log" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.783012 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a" exitCode=1 Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.783040 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.783075 4765 scope.go:117] "RemoveContainer" containerID="f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.783624 4765 scope.go:117] "RemoveContainer" containerID="a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a" Dec 10 06:48:32 crc kubenswrapper[4765]: E1210 06:48:32.783757 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.787937 4765 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.799701 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.808646 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af4ada46-5cbb-4675-9e5a-4abf08bbea89-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.808698 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/af4ada46-5cbb-4675-9e5a-4abf08bbea89-env-overrides\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.808745 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af4ada46-5cbb-4675-9e5a-4abf08bbea89-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.808768 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-cr2vq\" (UniqueName: \"kubernetes.io/projected/af4ada46-5cbb-4675-9e5a-4abf08bbea89-kube-api-access-cr2vq\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.810023 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af4ada46-5cbb-4675-9e5a-4abf08bbea89-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.811275 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.811342 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/af4ada46-5cbb-4675-9e5a-4abf08bbea89-env-overrides\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.816464 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af4ada46-5cbb-4675-9e5a-4abf08bbea89-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.822901 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.824399 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr2vq\" (UniqueName: \"kubernetes.io/projected/af4ada46-5cbb-4675-9e5a-4abf08bbea89-kube-api-access-cr2vq\") pod \"ovnkube-control-plane-749d76644c-whkng\" (UID: \"af4ada46-5cbb-4675-9e5a-4abf08bbea89\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.833006 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.843538 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.854865 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.865746 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.867080 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.867118 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.867126 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.867139 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.867148 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:32Z","lastTransitionTime":"2025-12-10T06:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.878053 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastSta
te\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.888878 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea17722
5c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.903610 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Compl
eted\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.917073 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.922497 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee8
62ff2b8bb52fa04a9f89bf3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f5285c04913af0c8f19436f9775bb92fa805ae017667d065546b08c58770af4e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"message\\\":\\\"1.Namespace event handler 5 for removal\\\\nI1210 06:48:30.398561 6051 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1210 06:48:30.398103 6051 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398584 6051 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 06:48:30.398596 6051 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 06:48:30.398142 6051 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398176 6051 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 06:48:30.398566 6051 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1210 06:48:30.399250 6051 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1210 06:48:30.399282 6051 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 06:48:30.399306 6051 factory.go:656] Stopping watch factory\\\\nI1210 06:48:30.399316 6051 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 06:48:30.399326 6051 handler.go:208] Removed *v1.Node event handler 7\\\\nI1210 06:48:30.399352 6051 handler.go:208] Removed *v1.Node event handler 2\\\\nI1210 06:48:30.399332 6051 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z]\\\\nI1210 06:48:31.601251 6230 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, 
Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.935863 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.950055 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.962905 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.969211 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.969252 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.969261 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.969276 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.969287 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:32Z","lastTransitionTime":"2025-12-10T06:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:32 crc kubenswrapper[4765]: I1210 06:48:32.973482 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:32Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.071416 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.071456 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.071468 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.071484 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.071494 4765 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:33Z","lastTransitionTime":"2025-12-10T06:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.173224 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.173256 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.173263 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.173279 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.173291 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:33Z","lastTransitionTime":"2025-12-10T06:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.275398 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.275629 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.275701 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.275769 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.275836 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:33Z","lastTransitionTime":"2025-12-10T06:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.377819 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.377864 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.377879 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.377892 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.377899 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:33Z","lastTransitionTime":"2025-12-10T06:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.479411 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.479445 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.479454 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.479467 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.479477 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:33Z","lastTransitionTime":"2025-12-10T06:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.581701 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.581734 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.581743 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.581756 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.581765 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:33Z","lastTransitionTime":"2025-12-10T06:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.588153 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:33 crc kubenswrapper[4765]: E1210 06:48:33.588297 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.684053 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.684108 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.684127 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.684144 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.684156 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:33Z","lastTransitionTime":"2025-12-10T06:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.785534 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.785570 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.785579 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.785594 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.785604 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:33Z","lastTransitionTime":"2025-12-10T06:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.788454 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/1.log" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.791176 4765 scope.go:117] "RemoveContainer" containerID="a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a" Dec 10 06:48:33 crc kubenswrapper[4765]: E1210 06:48:33.791315 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.791842 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" event={"ID":"af4ada46-5cbb-4675-9e5a-4abf08bbea89","Type":"ContainerStarted","Data":"8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.791884 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" event={"ID":"af4ada46-5cbb-4675-9e5a-4abf08bbea89","Type":"ContainerStarted","Data":"d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.791898 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" event={"ID":"af4ada46-5cbb-4675-9e5a-4abf08bbea89","Type":"ContainerStarted","Data":"a04348b0fd76ee32b611149b7f74d7546198baba43e79d216940a9a1320fb91e"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.803887 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.820714 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z]\\\\nI1210 06:48:31.601251 6230 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.833207 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.844193 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.856272 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.867465 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.878546 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.887748 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.887783 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.887796 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.887819 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.887831 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:33Z","lastTransitionTime":"2025-12-10T06:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.891758 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.903632 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.916730 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.929119 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.938759 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.950740 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.962464 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 
06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.975914 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.985415 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.992368 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.992400 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.992409 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.992423 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:33 crc kubenswrapper[4765]: I1210 06:48:33.992431 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:33Z","lastTransitionTime":"2025-12-10T06:48:33Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.000966 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:33Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.011446 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.020375 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-k9sld"] Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.020799 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.020868 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.025477 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.037189 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.047464 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.058785 4765 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.068114 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.084678 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z]\\\\nI1210 06:48:31.601251 6230 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.095001 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.095032 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.095041 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.095054 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.095065 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:34Z","lastTransitionTime":"2025-12-10T06:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.097330 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.107007 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.115577 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 
06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.123676 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxxpc\" (UniqueName: \"kubernetes.io/projected/efb71311-50ec-4765-8caf-6f2e02b8dce9-kube-api-access-pxxpc\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.123725 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.127119 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.137867 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.149857 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.161375 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.170815 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.181165 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.189895 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.196836 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.197437 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.197452 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.197468 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.198218 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:34Z","lastTransitionTime":"2025-12-10T06:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.200333 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.211629 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.219852 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.224806 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxxpc\" (UniqueName: \"kubernetes.io/projected/efb71311-50ec-4765-8caf-6f2e02b8dce9-kube-api-access-pxxpc\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.224843 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.224959 4765 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.225004 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs podName:efb71311-50ec-4765-8caf-6f2e02b8dce9 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:34.724991926 +0000 UTC m=+34.451657242 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs") pod "network-metrics-daemon-k9sld" (UID: "efb71311-50ec-4765-8caf-6f2e02b8dce9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.230219 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.239020 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.239968 4765 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pxxpc\" (UniqueName: \"kubernetes.io/projected/efb71311-50ec-4765-8caf-6f2e02b8dce9-kube-api-access-pxxpc\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.252221 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17
b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.267153 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z]\\\\nI1210 06:48:31.601251 6230 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/service
account\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.276440 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.287037 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.297260 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.300527 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.300558 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.300566 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.300580 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.300590 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:34Z","lastTransitionTime":"2025-12-10T06:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.309254 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.325914 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.326052 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.326109 4765 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.326132 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.326347 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.326376 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.326391 4765 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.326440 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:50.326424611 +0000 UTC m=+50.053089927 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.326974 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:48:50.326960286 +0000 UTC m=+50.053625602 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.327071 4765 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.327136 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:50.327124761 +0000 UTC m=+50.053790077 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.327209 4765 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.327242 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:50.327231894 +0000 UTC m=+50.053897210 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.328299 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:34Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.402636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.402679 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.402690 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.402706 
4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.402716 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:34Z","lastTransitionTime":"2025-12-10T06:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.426867 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.427078 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.427126 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.427137 4765 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.427191 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:50.427176886 +0000 UTC m=+50.153842202 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.505220 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.505268 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.505280 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.505298 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.505312 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:34Z","lastTransitionTime":"2025-12-10T06:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.588625 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.588767 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.589436 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.589573 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.608238 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.608537 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.608559 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.608578 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.608597 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:34Z","lastTransitionTime":"2025-12-10T06:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.711283 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.711497 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.711596 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.711666 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.711722 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:34Z","lastTransitionTime":"2025-12-10T06:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.729873 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.730232 4765 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: E1210 06:48:34.730367 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs podName:efb71311-50ec-4765-8caf-6f2e02b8dce9 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:35.730350579 +0000 UTC m=+35.457015895 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs") pod "network-metrics-daemon-k9sld" (UID: "efb71311-50ec-4765-8caf-6f2e02b8dce9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.814253 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.814287 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.814295 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.814317 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.814334 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:34Z","lastTransitionTime":"2025-12-10T06:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.917639 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.917690 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.917705 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.917723 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:34 crc kubenswrapper[4765]: I1210 06:48:34.917737 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:34Z","lastTransitionTime":"2025-12-10T06:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.020928 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.020979 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.020994 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.021017 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.021032 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:35Z","lastTransitionTime":"2025-12-10T06:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.122982 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.123014 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.123024 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.123037 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.123047 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:35Z","lastTransitionTime":"2025-12-10T06:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.226177 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.226224 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.226235 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.226254 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.226266 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:35Z","lastTransitionTime":"2025-12-10T06:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.329557 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.329609 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.329620 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.329640 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.329654 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:35Z","lastTransitionTime":"2025-12-10T06:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.432132 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.432193 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.432203 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.432220 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.432232 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:35Z","lastTransitionTime":"2025-12-10T06:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.534606 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.534650 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.534661 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.534677 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.534689 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:35Z","lastTransitionTime":"2025-12-10T06:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.588943 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.588974 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:35 crc kubenswrapper[4765]: E1210 06:48:35.589075 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:35 crc kubenswrapper[4765]: E1210 06:48:35.589182 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.637310 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.637352 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.637363 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.637379 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.637391 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:35Z","lastTransitionTime":"2025-12-10T06:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.739071 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:35 crc kubenswrapper[4765]: E1210 06:48:35.739241 4765 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:35 crc kubenswrapper[4765]: E1210 06:48:35.739307 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs podName:efb71311-50ec-4765-8caf-6f2e02b8dce9 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:37.739289574 +0000 UTC m=+37.465954890 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs") pod "network-metrics-daemon-k9sld" (UID: "efb71311-50ec-4765-8caf-6f2e02b8dce9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.739679 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.739739 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.739755 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.739776 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.739787 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:35Z","lastTransitionTime":"2025-12-10T06:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.787263 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.807364 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.820281 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.830888 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.841443 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.841474 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.841483 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.841497 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.841509 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:35Z","lastTransitionTime":"2025-12-10T06:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.843724 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastSta
te\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.856226 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea17722
5c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.871221 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Compl
eted\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.885770 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 
06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.899726 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.913527 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.924264 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.944410 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.944462 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.944474 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.944495 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.944510 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:35Z","lastTransitionTime":"2025-12-10T06:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.944536 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z]\\\\nI1210 06:48:31.601251 6230 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/service
account\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.958348 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.971805 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.984743 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:35 crc kubenswrapper[4765]: I1210 06:48:35.997227 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:35Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.009010 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:36Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.047587 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.047630 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.047640 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.047658 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.047671 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:36Z","lastTransitionTime":"2025-12-10T06:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.151724 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.152067 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.152222 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.152310 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.152381 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:36Z","lastTransitionTime":"2025-12-10T06:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.256617 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.257262 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.257405 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.257512 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.257583 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:36Z","lastTransitionTime":"2025-12-10T06:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.360770 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.360840 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.360853 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.360872 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.360886 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:36Z","lastTransitionTime":"2025-12-10T06:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.464583 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.464643 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.464655 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.464675 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.464694 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:36Z","lastTransitionTime":"2025-12-10T06:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.567400 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.567763 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.567897 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.568064 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.568201 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:36Z","lastTransitionTime":"2025-12-10T06:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.588778 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.588786 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:36 crc kubenswrapper[4765]: E1210 06:48:36.588929 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:36 crc kubenswrapper[4765]: E1210 06:48:36.589014 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.670561 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.670638 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.670661 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.670694 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.670714 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:36Z","lastTransitionTime":"2025-12-10T06:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.773644 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.773704 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.773719 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.773737 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.773751 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:36Z","lastTransitionTime":"2025-12-10T06:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.877302 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.877370 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.877382 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.877408 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.877425 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:36Z","lastTransitionTime":"2025-12-10T06:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.980493 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.980585 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.980600 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.980629 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:36 crc kubenswrapper[4765]: I1210 06:48:36.980644 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:36Z","lastTransitionTime":"2025-12-10T06:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.083531 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.083574 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.083583 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.083603 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.083615 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:37Z","lastTransitionTime":"2025-12-10T06:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.185562 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.185614 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.185625 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.185639 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.185650 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:37Z","lastTransitionTime":"2025-12-10T06:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.287865 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.288146 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.288210 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.288269 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.288338 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:37Z","lastTransitionTime":"2025-12-10T06:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.390519 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.390563 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.390579 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.390599 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.390612 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:37Z","lastTransitionTime":"2025-12-10T06:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.493638 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.493884 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.493979 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.494043 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.494135 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:37Z","lastTransitionTime":"2025-12-10T06:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.588845 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.588845 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:37 crc kubenswrapper[4765]: E1210 06:48:37.589148 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:37 crc kubenswrapper[4765]: E1210 06:48:37.589075 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.599108 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.599538 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.599599 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.599633 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.599653 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:37Z","lastTransitionTime":"2025-12-10T06:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.703105 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.703173 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.703187 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.703213 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.703230 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:37Z","lastTransitionTime":"2025-12-10T06:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.758965 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:37 crc kubenswrapper[4765]: E1210 06:48:37.759337 4765 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:37 crc kubenswrapper[4765]: E1210 06:48:37.759513 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs podName:efb71311-50ec-4765-8caf-6f2e02b8dce9 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:41.759479519 +0000 UTC m=+41.486144845 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs") pod "network-metrics-daemon-k9sld" (UID: "efb71311-50ec-4765-8caf-6f2e02b8dce9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.805690 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.805738 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.805749 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.805763 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.805774 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:37Z","lastTransitionTime":"2025-12-10T06:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.908811 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.908857 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.908865 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.908879 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:37 crc kubenswrapper[4765]: I1210 06:48:37.908889 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:37Z","lastTransitionTime":"2025-12-10T06:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.011021 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.011268 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.011328 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.011393 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.011473 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:38Z","lastTransitionTime":"2025-12-10T06:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.113945 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.114198 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.114272 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.114349 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.114421 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:38Z","lastTransitionTime":"2025-12-10T06:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.217320 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.217364 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.217373 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.217388 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.217397 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:38Z","lastTransitionTime":"2025-12-10T06:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.319945 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.320044 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.320116 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.320182 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.320206 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:38Z","lastTransitionTime":"2025-12-10T06:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.422492 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.422526 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.422535 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.422581 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.422593 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:38Z","lastTransitionTime":"2025-12-10T06:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.525431 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.525494 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.525506 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.525528 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.525540 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:38Z","lastTransitionTime":"2025-12-10T06:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.588176 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.588243 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:38 crc kubenswrapper[4765]: E1210 06:48:38.588438 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:38 crc kubenswrapper[4765]: E1210 06:48:38.588586 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.627578 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.627628 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.627637 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.627652 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.627661 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:38Z","lastTransitionTime":"2025-12-10T06:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.729786 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.729828 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.729837 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.729851 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.729861 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:38Z","lastTransitionTime":"2025-12-10T06:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.832263 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.832310 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.832322 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.832338 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.832349 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:38Z","lastTransitionTime":"2025-12-10T06:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.935864 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.935933 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.935949 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.935973 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:38 crc kubenswrapper[4765]: I1210 06:48:38.935995 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:38Z","lastTransitionTime":"2025-12-10T06:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 10 06:48:39 crc kubenswrapper[4765]: I1210 06:48:39.588705 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld"
Dec 10 06:48:39 crc kubenswrapper[4765]: I1210 06:48:39.588719 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:48:39 crc kubenswrapper[4765]: E1210 06:48:39.588857 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9"
Dec 10 06:48:39 crc kubenswrapper[4765]: E1210 06:48:39.588945 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 06:48:39 crc kubenswrapper[4765]: I1210 06:48:39.655492 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:39 crc kubenswrapper[4765]: I1210 06:48:39.655517 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:39 crc kubenswrapper[4765]: I1210 06:48:39.655524 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:39 crc kubenswrapper[4765]: I1210 06:48:39.655537 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:39 crc kubenswrapper[4765]: I1210 06:48:39.655546 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:39Z","lastTransitionTime":"2025-12-10T06:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
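From here on, every pod status patch the kubelet sends is rejected by the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 with the same x509 complaint: the webhook's serving certificate expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-10. A minimal sketch for reading an endpoint's validity window directly, assuming Python 3 with the third-party cryptography package installed; the host and port come from the webhook URL quoted in the errors below, and cert_validity is a placeholder name:

import socket
import ssl

from cryptography import x509  # third-party; not part of the stdlib

def cert_validity(host, port):
    # Verification is deliberately disabled: the point is to inspect a
    # certificate that would fail verification anyway.
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    with socket.create_connection((host, port), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=host) as tls:
            # With CERT_NONE the decoded-dict form of getpeercert() is empty,
            # so fetch the raw DER bytes and parse them ourselves.
            der = tls.getpeercert(binary_form=True)
    cert = x509.load_der_x509_certificate(der)
    return cert.not_valid_before, cert.not_valid_after

if __name__ == "__main__":
    start, end = cert_validity("127.0.0.1", 9743)
    print(f"certificate valid from {start} until {end}")

Comparing not_valid_after against the node's clock reproduces the "certificate has expired" verdict quoted in the status patch failures below.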
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.373680 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.373719 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.373730 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.373747 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.373757 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:40Z","lastTransitionTime":"2025-12-10T06:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.475611 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.475651 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.475659 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.475675 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.475685 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:40Z","lastTransitionTime":"2025-12-10T06:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.578393 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.578430 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.578441 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.578457 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.578467 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:40Z","lastTransitionTime":"2025-12-10T06:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.587937 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.588002 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:40 crc kubenswrapper[4765]: E1210 06:48:40.588061 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:40 crc kubenswrapper[4765]: E1210 06:48:40.588135 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.601201 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd79
1fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.615045 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.625687 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.637885 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.648109 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.663651 4765 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.673725 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.680638 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.680669 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.680677 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.680690 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.680700 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:40Z","lastTransitionTime":"2025-12-10T06:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.686531 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.697707 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.710310 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.720850 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.741534 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z]\\\\nI1210 06:48:31.601251 6230 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/service
account\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.756309 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.772174 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.786836 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.786889 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.786897 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.786914 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.786927 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:40Z","lastTransitionTime":"2025-12-10T06:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.790306 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.807779 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:40Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.890404 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.890479 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.890513 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.890536 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.890548 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:40Z","lastTransitionTime":"2025-12-10T06:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.992996 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.993036 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.993045 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.993061 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:40 crc kubenswrapper[4765]: I1210 06:48:40.993072 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:40Z","lastTransitionTime":"2025-12-10T06:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.097979 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.098018 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.098025 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.098039 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.098048 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:41Z","lastTransitionTime":"2025-12-10T06:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.200273 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.200311 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.200331 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.200352 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.200362 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:41Z","lastTransitionTime":"2025-12-10T06:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.302677 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.302722 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.302733 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.302748 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.302771 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:41Z","lastTransitionTime":"2025-12-10T06:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.405874 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.405993 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.406016 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.406045 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.406065 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:41Z","lastTransitionTime":"2025-12-10T06:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.508531 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.508570 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.508581 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.508598 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.508609 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:41Z","lastTransitionTime":"2025-12-10T06:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.588565 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.588602 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:41 crc kubenswrapper[4765]: E1210 06:48:41.588733 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:41 crc kubenswrapper[4765]: E1210 06:48:41.588822 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.611879 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.611935 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.611946 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.611964 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.611974 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:41Z","lastTransitionTime":"2025-12-10T06:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.714615 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.714683 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.714695 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.714712 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.714723 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:41Z","lastTransitionTime":"2025-12-10T06:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.800047 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:41 crc kubenswrapper[4765]: E1210 06:48:41.800292 4765 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:41 crc kubenswrapper[4765]: E1210 06:48:41.800438 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs podName:efb71311-50ec-4765-8caf-6f2e02b8dce9 nodeName:}" failed. No retries permitted until 2025-12-10 06:48:49.800407436 +0000 UTC m=+49.527072762 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs") pod "network-metrics-daemon-k9sld" (UID: "efb71311-50ec-4765-8caf-6f2e02b8dce9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.813728 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.813779 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.813793 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.813821 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.813837 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:41Z","lastTransitionTime":"2025-12-10T06:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:41 crc kubenswrapper[4765]: E1210 06:48:41.832393 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:41Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.836406 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.836449 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.836459 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.836480 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:41 crc kubenswrapper[4765]: I1210 06:48:41.836491 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:41Z","lastTransitionTime":"2025-12-10T06:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:42 crc kubenswrapper[4765]: E1210 06:48:42.356487 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:42Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.361141 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.361189 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.361201 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.361223 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.361236 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:42Z","lastTransitionTime":"2025-12-10T06:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:42 crc kubenswrapper[4765]: E1210 06:48:42.374648 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:42Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.379752 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.379813 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.379826 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.379845 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.379855 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:42Z","lastTransitionTime":"2025-12-10T06:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:42 crc kubenswrapper[4765]: E1210 06:48:42.394787 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:42Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.398998 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.399041 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.399054 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.399069 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.399079 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:42Z","lastTransitionTime":"2025-12-10T06:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:42 crc kubenswrapper[4765]: E1210 06:48:42.412874 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:42Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:42 crc kubenswrapper[4765]: E1210 06:48:42.412992 4765 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.415655 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.415695 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.415704 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.415719 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.415728 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:42Z","lastTransitionTime":"2025-12-10T06:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.518615 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.518669 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.518681 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.518706 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.518717 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:42Z","lastTransitionTime":"2025-12-10T06:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.588484 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:42 crc kubenswrapper[4765]: E1210 06:48:42.588756 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.589045 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:42 crc kubenswrapper[4765]: E1210 06:48:42.589346 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.622192 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.622243 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.622259 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.622285 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.622300 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:42Z","lastTransitionTime":"2025-12-10T06:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.725321 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.725372 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.725384 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.725402 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.725414 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:42Z","lastTransitionTime":"2025-12-10T06:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.827800 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.827865 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.827883 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.827913 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.827933 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:42Z","lastTransitionTime":"2025-12-10T06:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.930799 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.930835 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.930844 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.930858 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:42 crc kubenswrapper[4765]: I1210 06:48:42.930866 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:42Z","lastTransitionTime":"2025-12-10T06:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.034720 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.034777 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.034788 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.034812 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.034825 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:43Z","lastTransitionTime":"2025-12-10T06:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.138692 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.138739 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.138748 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.138766 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.138788 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:43Z","lastTransitionTime":"2025-12-10T06:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.241739 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.241781 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.241797 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.241820 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.241831 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:43Z","lastTransitionTime":"2025-12-10T06:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.344081 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.344148 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.344160 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.344174 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.344184 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:43Z","lastTransitionTime":"2025-12-10T06:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.446070 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.446128 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.446138 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.446152 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.446162 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:43Z","lastTransitionTime":"2025-12-10T06:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.547824 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.547850 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.547859 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.547872 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.547882 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:43Z","lastTransitionTime":"2025-12-10T06:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.588322 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.588408 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:43 crc kubenswrapper[4765]: E1210 06:48:43.588474 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:43 crc kubenswrapper[4765]: E1210 06:48:43.588532 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.650522 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.650548 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.650560 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.650578 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.650589 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:43Z","lastTransitionTime":"2025-12-10T06:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.752782 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.752831 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.752848 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.752867 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.752878 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:43Z","lastTransitionTime":"2025-12-10T06:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.856496 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.856540 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.856549 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.856565 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.856575 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:43Z","lastTransitionTime":"2025-12-10T06:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.958766 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.958795 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.958806 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.958823 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:43 crc kubenswrapper[4765]: I1210 06:48:43.958832 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:43Z","lastTransitionTime":"2025-12-10T06:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.060796 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.060852 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.060870 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.060887 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.060898 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:44Z","lastTransitionTime":"2025-12-10T06:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.163765 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.163804 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.163815 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.163830 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.163841 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:44Z","lastTransitionTime":"2025-12-10T06:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.266617 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.266675 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.266692 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.266717 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.266733 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:44Z","lastTransitionTime":"2025-12-10T06:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.370483 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.370522 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.370530 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.370544 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.370553 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:44Z","lastTransitionTime":"2025-12-10T06:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.473257 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.473298 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.473310 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.473325 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.473336 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:44Z","lastTransitionTime":"2025-12-10T06:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.575334 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.575365 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.575373 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.575388 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.575398 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:44Z","lastTransitionTime":"2025-12-10T06:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.588218 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.588233 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:44 crc kubenswrapper[4765]: E1210 06:48:44.588383 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:44 crc kubenswrapper[4765]: E1210 06:48:44.588567 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.677613 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.677649 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.677660 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.677677 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.677687 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:44Z","lastTransitionTime":"2025-12-10T06:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.780118 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.780146 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.780156 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.780172 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.780183 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:44Z","lastTransitionTime":"2025-12-10T06:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.882596 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.882838 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.882900 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.883001 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.883066 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:44Z","lastTransitionTime":"2025-12-10T06:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.985129 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.985166 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.985177 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.985192 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:44 crc kubenswrapper[4765]: I1210 06:48:44.985203 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:44Z","lastTransitionTime":"2025-12-10T06:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.087187 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.087226 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.087236 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.087251 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.087262 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:45Z","lastTransitionTime":"2025-12-10T06:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.189257 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.189289 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.189297 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.189310 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.189320 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:45Z","lastTransitionTime":"2025-12-10T06:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.291826 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.291861 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.291871 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.291885 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.291919 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:45Z","lastTransitionTime":"2025-12-10T06:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.394339 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.394381 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.394390 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.394404 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.394416 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:45Z","lastTransitionTime":"2025-12-10T06:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.495947 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.495979 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.495989 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.496003 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.496013 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:45Z","lastTransitionTime":"2025-12-10T06:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.588394 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.588482 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:45 crc kubenswrapper[4765]: E1210 06:48:45.588578 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:45 crc kubenswrapper[4765]: E1210 06:48:45.588708 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.598142 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.598191 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.598203 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.598217 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.598274 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:45Z","lastTransitionTime":"2025-12-10T06:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.700931 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.701005 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.701024 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.701044 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.701058 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:45Z","lastTransitionTime":"2025-12-10T06:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.804070 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.804133 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.804146 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.804163 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.804175 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:45Z","lastTransitionTime":"2025-12-10T06:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.907297 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.907346 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.907355 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.907370 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:45 crc kubenswrapper[4765]: I1210 06:48:45.907380 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:45Z","lastTransitionTime":"2025-12-10T06:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.010016 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.010051 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.010060 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.010075 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.010134 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:46Z","lastTransitionTime":"2025-12-10T06:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.113864 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.113919 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.113933 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.113958 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.113977 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:46Z","lastTransitionTime":"2025-12-10T06:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.217591 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.218026 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.218174 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.218311 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.218438 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:46Z","lastTransitionTime":"2025-12-10T06:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.321334 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.321612 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.321682 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.321750 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.321815 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:46Z","lastTransitionTime":"2025-12-10T06:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.425709 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.425953 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.426077 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.426192 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.426254 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:46Z","lastTransitionTime":"2025-12-10T06:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.529108 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.529864 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.529933 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.530010 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.530150 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:46Z","lastTransitionTime":"2025-12-10T06:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.588772 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.588908 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:46 crc kubenswrapper[4765]: E1210 06:48:46.589125 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:46 crc kubenswrapper[4765]: E1210 06:48:46.589623 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.589879 4765 scope.go:117] "RemoveContainer" containerID="a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.634372 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.634423 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.634454 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.634472 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.634485 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:46Z","lastTransitionTime":"2025-12-10T06:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.737841 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.737949 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.737965 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.737994 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.738010 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:46Z","lastTransitionTime":"2025-12-10T06:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.832913 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/1.log" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.835799 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.836892 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.841254 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.841304 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.841318 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.841340 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.841352 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:46Z","lastTransitionTime":"2025-12-10T06:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.852555 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.866467 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.880563 4765 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.893872 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.914719 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.926840 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.937665 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.944062 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.944123 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.944137 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.944159 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.944173 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:46Z","lastTransitionTime":"2025-12-10T06:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.959145 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46
af0c9a0773d08a81eaa1b002\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z]\\\\nI1210 06:48:31.601251 6230 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\
\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.971341 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:46 crc kubenswrapper[4765]: I1210 06:48:46.986479 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.000127 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:46Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.014665 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.026874 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.046187 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.046233 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.046244 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.046261 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.046273 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:47Z","lastTransitionTime":"2025-12-10T06:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.046464 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.062490 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.118125 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.148843 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.148920 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.148933 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.148949 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.148982 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:47Z","lastTransitionTime":"2025-12-10T06:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.250922 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.250971 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.250980 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.251004 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.251015 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:47Z","lastTransitionTime":"2025-12-10T06:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.353002 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.353037 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.353047 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.353060 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.353068 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:47Z","lastTransitionTime":"2025-12-10T06:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.455157 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.455208 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.455217 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.455231 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.455241 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:47Z","lastTransitionTime":"2025-12-10T06:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.557277 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.557322 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.557334 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.557369 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.557384 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:47Z","lastTransitionTime":"2025-12-10T06:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.588012 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.588027 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:48:47 crc kubenswrapper[4765]: E1210 06:48:47.588373 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9"
Dec 10 06:48:47 crc kubenswrapper[4765]: E1210 06:48:47.588509 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.659715 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.659759 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.659771 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.659830 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.659853 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:47Z","lastTransitionTime":"2025-12-10T06:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.763441 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.763528 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.763561 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.763596 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.763627 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:47Z","lastTransitionTime":"2025-12-10T06:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.841775 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/2.log"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.842963 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/1.log"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.846504 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002" exitCode=1
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.846565 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002"}
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.846735 4765 scope.go:117] "RemoveContainer" containerID="a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.848537 4765 scope.go:117] "RemoveContainer" containerID="5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002"
Dec 10 06:48:47 crc kubenswrapper[4765]: E1210 06:48:47.849105 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.870209 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.870438 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.870453 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.870471 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.870481 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:47Z","lastTransitionTime":"2025-12-10T06:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.879580 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status:
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46
af0c9a0773d08a81eaa1b002\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a329eab27e70435e2adfd77603682afb334faee862ff2b8bb52fa04a9f89bf3a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:31Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:31Z is after 2025-08-24T17:21:41Z]\\\\nI1210 06:48:31.601251 6230 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:47Z\\\",\\\"message\\\":\\\"r pod on switch crc\\\\nI1210 06:48:47.490404 6445 services_controller.go:356] Processing sync for service openshift-machine-api/machine-api-operator-webhook for network=default\\\\nI1210 06:48:47.490415 6445 services_controller.go:434] Service openshift-machine-api/machine-api-operator-webhook retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-api-operator-webhook openshift-machine-api 9a52fff0-1401-419f-a09a-83e2d591f2ef 4461 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-api-operator-webhook] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:machine-api-operator-webhook-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] 
[{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e93b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{1 0 webhook-server},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{api: clusterapi,k8s-app: controller,},Clust\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.893953 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.911034 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.924651 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.941158 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z"
Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.954148 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.970465 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.973011 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.973072 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.973174 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.973212 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.973226 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:47Z","lastTransitionTime":"2025-12-10T06:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.983485 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:47 crc kubenswrapper[4765]: I1210 06:48:47.999452 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:47Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.011449 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.025130 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manag
er-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.039167 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.052931 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.068196 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.076588 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.076636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.076647 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.076668 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.076682 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:48Z","lastTransitionTime":"2025-12-10T06:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.080819 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea1
77225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.094946 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"C
ompleted\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.180121 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.180166 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.180178 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.180193 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.180204 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:48Z","lastTransitionTime":"2025-12-10T06:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.283476 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.283589 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.283604 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.283627 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.283641 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:48Z","lastTransitionTime":"2025-12-10T06:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.386547 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.386615 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.386629 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.386654 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.386666 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:48Z","lastTransitionTime":"2025-12-10T06:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.488734 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.488783 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.488800 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.488817 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.488826 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:48Z","lastTransitionTime":"2025-12-10T06:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.588379 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.588474 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:48 crc kubenswrapper[4765]: E1210 06:48:48.588552 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:48 crc kubenswrapper[4765]: E1210 06:48:48.588614 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.591053 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.591139 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.591152 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.591170 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.591184 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:48Z","lastTransitionTime":"2025-12-10T06:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.693528 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.693581 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.693595 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.693611 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.693624 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:48Z","lastTransitionTime":"2025-12-10T06:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.795896 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.795936 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.795947 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.795967 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.795980 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:48Z","lastTransitionTime":"2025-12-10T06:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.851505 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/2.log" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.855033 4765 scope.go:117] "RemoveContainer" containerID="5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002" Dec 10 06:48:48 crc kubenswrapper[4765]: E1210 06:48:48.855221 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.866728 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.878316 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.887299 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.898569 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.898609 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.898621 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.898637 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.898649 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:48Z","lastTransitionTime":"2025-12-10T06:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.906564 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46
af0c9a0773d08a81eaa1b002\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:47Z\\\",\\\"message\\\":\\\"r pod on switch crc\\\\nI1210 06:48:47.490404 6445 services_controller.go:356] Processing sync for service openshift-machine-api/machine-api-operator-webhook for network=default\\\\nI1210 06:48:47.490415 6445 services_controller.go:434] Service openshift-machine-api/machine-api-operator-webhook retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-api-operator-webhook openshift-machine-api 9a52fff0-1401-419f-a09a-83e2d591f2ef 4461 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-api-operator-webhook] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:machine-api-operator-webhook-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e93b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{1 0 webhook-server},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{api: clusterapi,k8s-app: controller,},Clust\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.917402 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.929956 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.942069 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.955560 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.967702 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.979223 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.989572 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:48 crc kubenswrapper[4765]: I1210 06:48:48.999004 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:48Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.000545 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.000614 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.000641 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.000657 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.000669 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:49Z","lastTransitionTime":"2025-12-10T06:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.010860 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220
d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:49Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.021604 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:49Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.034278 4765 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:49Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.046408 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:49Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.103051 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.103121 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.103136 4765 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.103153 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.103170 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:49Z","lastTransitionTime":"2025-12-10T06:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.205389 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.206279 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.206291 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.206311 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.206324 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:49Z","lastTransitionTime":"2025-12-10T06:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.309786 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.309831 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.309841 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.309860 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.309874 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:49Z","lastTransitionTime":"2025-12-10T06:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.412025 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.412063 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.412074 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.412101 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.412111 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:49Z","lastTransitionTime":"2025-12-10T06:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.515962 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.516006 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.516015 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.516029 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.516039 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:49Z","lastTransitionTime":"2025-12-10T06:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.588546 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.588546 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:49 crc kubenswrapper[4765]: E1210 06:48:49.588674 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:49 crc kubenswrapper[4765]: E1210 06:48:49.588765 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.617995 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.618029 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.618037 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.618052 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.618062 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:49Z","lastTransitionTime":"2025-12-10T06:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.720377 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.720410 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.720419 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.720432 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.720441 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:49Z","lastTransitionTime":"2025-12-10T06:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.822503 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.822538 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.822551 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.822566 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.822577 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:49Z","lastTransitionTime":"2025-12-10T06:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.841319 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:49 crc kubenswrapper[4765]: E1210 06:48:49.841441 4765 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:49 crc kubenswrapper[4765]: E1210 06:48:49.841523 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs podName:efb71311-50ec-4765-8caf-6f2e02b8dce9 nodeName:}" failed. No retries permitted until 2025-12-10 06:49:05.841508279 +0000 UTC m=+65.568173595 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs") pod "network-metrics-daemon-k9sld" (UID: "efb71311-50ec-4765-8caf-6f2e02b8dce9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.924931 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.924969 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.924978 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.924990 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:49 crc kubenswrapper[4765]: I1210 06:48:49.924999 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:49Z","lastTransitionTime":"2025-12-10T06:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.026811 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.026848 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.026856 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.026872 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.026881 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:50Z","lastTransitionTime":"2025-12-10T06:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.129305 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.129344 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.129352 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.129366 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.129377 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:50Z","lastTransitionTime":"2025-12-10T06:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.231200 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.231244 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.231252 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.231268 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.231278 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:50Z","lastTransitionTime":"2025-12-10T06:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.333496 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.333552 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.333565 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.333584 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.333596 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:50Z","lastTransitionTime":"2025-12-10T06:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.346304 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.347211 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.347280 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:49:22.347232933 +0000 UTC m=+82.073898259 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.347345 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.347358 4765 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.347424 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:49:22.347404448 +0000 UTC m=+82.074069764 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.347478 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.347608 4765 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.347654 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:49:22.347644935 +0000 UTC m=+82.074310261 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.347686 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.347700 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.347717 4765 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.347748 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 06:49:22.347737118 +0000 UTC m=+82.074402434 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.435463 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.435510 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.435525 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.435544 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.435554 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:50Z","lastTransitionTime":"2025-12-10T06:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.448417 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.448616 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.448650 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.448664 4765 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.448721 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 06:49:22.448702719 +0000 UTC m=+82.175368035 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.538154 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.538193 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.538204 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.538222 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.538235 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:50Z","lastTransitionTime":"2025-12-10T06:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.588723 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.588791 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.588875 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:50 crc kubenswrapper[4765]: E1210 06:48:50.588930 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.601141 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.613602 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.623058 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.634119 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.640166 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.640202 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.640212 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.640228 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.640239 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:50Z","lastTransitionTime":"2025-12-10T06:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.644424 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea1
77225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.659521 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"C
ompleted\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.673182 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\
"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.684301 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.695130 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.703981 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.720960 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46
af0c9a0773d08a81eaa1b002\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:47Z\\\",\\\"message\\\":\\\"r pod on switch crc\\\\nI1210 06:48:47.490404 6445 services_controller.go:356] Processing sync for service openshift-machine-api/machine-api-operator-webhook for network=default\\\\nI1210 06:48:47.490415 6445 services_controller.go:434] Service openshift-machine-api/machine-api-operator-webhook retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-api-operator-webhook openshift-machine-api 9a52fff0-1401-419f-a09a-83e2d591f2ef 4461 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-api-operator-webhook] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:machine-api-operator-webhook-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e93b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{1 0 webhook-server},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{api: clusterapi,k8s-app: controller,},Clust\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.731218 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.742695 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.742988 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.743065 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.743187 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.743221 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.743233 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:50Z","lastTransitionTime":"2025-12-10T06:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.753573 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.764378 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.774832 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:50Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.845198 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.845237 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.845246 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.845261 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.845269 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:50Z","lastTransitionTime":"2025-12-10T06:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.949943 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.949980 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.949989 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.950008 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:50 crc kubenswrapper[4765]: I1210 06:48:50.950017 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:50Z","lastTransitionTime":"2025-12-10T06:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.053129 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.053165 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.053175 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.053191 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.053202 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:51Z","lastTransitionTime":"2025-12-10T06:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.155695 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.155731 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.155741 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.155755 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.155764 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:51Z","lastTransitionTime":"2025-12-10T06:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.257825 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.257876 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.257888 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.258022 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.258039 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:51Z","lastTransitionTime":"2025-12-10T06:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.360253 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.360294 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.360303 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.360317 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.360328 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:51Z","lastTransitionTime":"2025-12-10T06:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.462923 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.463129 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.463145 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.463160 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.463169 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:51Z","lastTransitionTime":"2025-12-10T06:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.565721 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.565764 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.565774 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.565791 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.565803 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:51Z","lastTransitionTime":"2025-12-10T06:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.588340 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.588381 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:51 crc kubenswrapper[4765]: E1210 06:48:51.588511 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:51 crc kubenswrapper[4765]: E1210 06:48:51.588621 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.668746 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.668792 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.668802 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.668827 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.668840 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:51Z","lastTransitionTime":"2025-12-10T06:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.770979 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.771312 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.771326 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.771343 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.771353 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:51Z","lastTransitionTime":"2025-12-10T06:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.874129 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.874173 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.874184 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.874203 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.874214 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:51Z","lastTransitionTime":"2025-12-10T06:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.977381 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.977430 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.977442 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.977461 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:51 crc kubenswrapper[4765]: I1210 06:48:51.977472 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:51Z","lastTransitionTime":"2025-12-10T06:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.080225 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.080264 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.080274 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.080289 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.080300 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.182293 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.182330 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.182339 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.182353 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.182362 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.284557 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.284606 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.284618 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.284636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.284646 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.387858 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.387897 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.387905 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.387920 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.387935 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.490190 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.490228 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.490238 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.490253 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.490263 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.588569 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.588638 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:52 crc kubenswrapper[4765]: E1210 06:48:52.588694 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:52 crc kubenswrapper[4765]: E1210 06:48:52.588816 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.592271 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.592311 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.592319 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.592337 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.592346 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.694584 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.694616 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.694634 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.694651 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.694663 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.796818 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.796866 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.796879 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.796894 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.796906 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.808314 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.808357 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.808368 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.808382 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.808394 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: E1210 06:48:52.821985 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:52Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.825523 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.825558 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.825574 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.825589 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.825599 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: E1210 06:48:52.836115 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:52Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.839551 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.839708 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.839792 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.839866 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.840122 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: E1210 06:48:52.851250 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:52Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.854862 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.855014 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.855126 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.855239 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.855304 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: E1210 06:48:52.867377 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:52Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.870693 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.870843 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.870915 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.871000 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.871102 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:52 crc kubenswrapper[4765]: E1210 06:48:52.882169 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:52Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:52 crc kubenswrapper[4765]: E1210 06:48:52.882329 4765 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.899932 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.900113 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.900215 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.900321 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:52 crc kubenswrapper[4765]: I1210 06:48:52.900410 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:52Z","lastTransitionTime":"2025-12-10T06:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.002841 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.002890 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.002901 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.002918 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.002929 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:53Z","lastTransitionTime":"2025-12-10T06:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.105561 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.105609 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.105626 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.105645 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.105656 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:53Z","lastTransitionTime":"2025-12-10T06:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.207753 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.207797 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.207810 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.207827 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.207911 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:53Z","lastTransitionTime":"2025-12-10T06:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.310220 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.310261 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.310279 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.310301 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.310314 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:53Z","lastTransitionTime":"2025-12-10T06:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.412776 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.413069 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.413194 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.413270 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.413340 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:53Z","lastTransitionTime":"2025-12-10T06:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.516868 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.516912 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.516924 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.516941 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.516955 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:53Z","lastTransitionTime":"2025-12-10T06:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.588579 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:53 crc kubenswrapper[4765]: E1210 06:48:53.588740 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.588613 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:53 crc kubenswrapper[4765]: E1210 06:48:53.589199 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.619324 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.619357 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.619366 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.619378 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.619387 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:53Z","lastTransitionTime":"2025-12-10T06:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.721578 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.721651 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.721661 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.721676 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.721686 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:53Z","lastTransitionTime":"2025-12-10T06:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.758597 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.769923 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.771782 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.783390 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.794903 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.809866 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.823332 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.825055 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.825082 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.825111 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.825128 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.825140 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:53Z","lastTransitionTime":"2025-12-10T06:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.839685 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.851572 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.864945 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.877820 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.892272 4765 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.904138 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.915742 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.927406 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.927446 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.927458 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.927475 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.927486 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:53Z","lastTransitionTime":"2025-12-10T06:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.928767 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.947218 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:47Z\\\",\\\"message\\\":\\\"r pod on switch crc\\\\nI1210 06:48:47.490404 6445 services_controller.go:356] Processing sync for service openshift-machine-api/machine-api-operator-webhook for network=default\\\\nI1210 06:48:47.490415 6445 services_controller.go:434] Service openshift-machine-api/machine-api-operator-webhook retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-api-operator-webhook openshift-machine-api 9a52fff0-1401-419f-a09a-83e2d591f2ef 4461 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-api-operator-webhook] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:machine-api-operator-webhook-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e93b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{1 0 webhook-server},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{api: clusterapi,k8s-app: controller,},Clust\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.959179 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:53 crc kubenswrapper[4765]: I1210 06:48:53.972998 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:48:53Z is after 2025-08-24T17:21:41Z" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.030124 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.030203 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.030215 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.030229 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.030238 4765 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:54Z","lastTransitionTime":"2025-12-10T06:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.132536 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.132584 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.132595 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.132609 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.132617 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:54Z","lastTransitionTime":"2025-12-10T06:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.235341 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.235385 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.235402 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.235424 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.235436 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:54Z","lastTransitionTime":"2025-12-10T06:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.337535 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.337573 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.337585 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.337601 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.337612 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:54Z","lastTransitionTime":"2025-12-10T06:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.440063 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.440142 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.440155 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.440174 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.440190 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:54Z","lastTransitionTime":"2025-12-10T06:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.542790 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.542923 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.543213 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.543451 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.543528 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:54Z","lastTransitionTime":"2025-12-10T06:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.587889 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.587911 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:54 crc kubenswrapper[4765]: E1210 06:48:54.588028 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:54 crc kubenswrapper[4765]: E1210 06:48:54.588104 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.646571 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.646610 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.646620 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.646636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.646646 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:54Z","lastTransitionTime":"2025-12-10T06:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.749292 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.749329 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.749339 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.749353 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.749364 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:54Z","lastTransitionTime":"2025-12-10T06:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.851745 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.851794 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.851807 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.851824 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.851834 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:54Z","lastTransitionTime":"2025-12-10T06:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.954276 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.954330 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.954344 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.954361 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:54 crc kubenswrapper[4765]: I1210 06:48:54.954372 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:54Z","lastTransitionTime":"2025-12-10T06:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.056787 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.056823 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.056836 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.056862 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.056879 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:55Z","lastTransitionTime":"2025-12-10T06:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.159319 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.159364 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.159373 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.159388 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.159397 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:55Z","lastTransitionTime":"2025-12-10T06:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.261270 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.261317 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.261330 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.261348 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.261360 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:55Z","lastTransitionTime":"2025-12-10T06:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.363670 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.363694 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.363702 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.363716 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.363731 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:55Z","lastTransitionTime":"2025-12-10T06:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.465665 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.465716 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.465730 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.465749 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.465761 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:55Z","lastTransitionTime":"2025-12-10T06:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.568196 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.568227 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.568237 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.568251 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.568262 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:55Z","lastTransitionTime":"2025-12-10T06:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.588862 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.588899 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:55 crc kubenswrapper[4765]: E1210 06:48:55.588992 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:55 crc kubenswrapper[4765]: E1210 06:48:55.589129 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.670580 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.670621 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.670631 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.670647 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.670658 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:55Z","lastTransitionTime":"2025-12-10T06:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.772872 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.772908 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.772920 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.772936 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.772950 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:55Z","lastTransitionTime":"2025-12-10T06:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.874776 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.874818 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.874839 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.874856 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.874866 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:55Z","lastTransitionTime":"2025-12-10T06:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.976905 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.976951 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.976965 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.976985 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:55 crc kubenswrapper[4765]: I1210 06:48:55.976999 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:55Z","lastTransitionTime":"2025-12-10T06:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.078484 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.078518 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.078526 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.078557 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.078567 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:56Z","lastTransitionTime":"2025-12-10T06:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.180822 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.180856 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.180864 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.180877 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.180887 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:56Z","lastTransitionTime":"2025-12-10T06:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.283558 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.283602 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.283614 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.283630 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.283648 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:56Z","lastTransitionTime":"2025-12-10T06:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.386171 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.386209 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.386228 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.386249 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.386260 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:56Z","lastTransitionTime":"2025-12-10T06:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.488894 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.488933 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.488942 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.488957 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.488967 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:56Z","lastTransitionTime":"2025-12-10T06:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.588958 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:56 crc kubenswrapper[4765]: E1210 06:48:56.589125 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.589181 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:56 crc kubenswrapper[4765]: E1210 06:48:56.589240 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.590806 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.590870 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.590881 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.590895 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.590906 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:56Z","lastTransitionTime":"2025-12-10T06:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.692824 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.692870 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.692880 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.692895 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.692904 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:56Z","lastTransitionTime":"2025-12-10T06:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.794875 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.794913 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.794921 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.794936 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.794946 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:56Z","lastTransitionTime":"2025-12-10T06:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.896829 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.896876 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.896888 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.896903 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:56 crc kubenswrapper[4765]: I1210 06:48:56.896912 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:56Z","lastTransitionTime":"2025-12-10T06:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.000227 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.000261 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.000270 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.000286 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.000295 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:57Z","lastTransitionTime":"2025-12-10T06:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.102871 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.102916 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.102928 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.102947 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.102958 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:57Z","lastTransitionTime":"2025-12-10T06:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.205950 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.206009 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.206023 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.206038 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.206047 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:57Z","lastTransitionTime":"2025-12-10T06:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.307926 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.307960 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.307968 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.307981 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.307990 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:57Z","lastTransitionTime":"2025-12-10T06:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.409581 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.409631 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.409640 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.409658 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.409674 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:57Z","lastTransitionTime":"2025-12-10T06:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.511520 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.511634 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.511648 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.511664 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.511675 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:57Z","lastTransitionTime":"2025-12-10T06:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.588452 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.588533 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:57 crc kubenswrapper[4765]: E1210 06:48:57.588575 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:57 crc kubenswrapper[4765]: E1210 06:48:57.588624 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.614220 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.614259 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.614269 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.614284 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.614296 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:57Z","lastTransitionTime":"2025-12-10T06:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.716025 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.716056 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.716065 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.716077 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.716110 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:57Z","lastTransitionTime":"2025-12-10T06:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.818765 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.818803 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.818813 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.818834 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.818845 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:57Z","lastTransitionTime":"2025-12-10T06:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.921787 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.921834 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.921845 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.921862 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:57 crc kubenswrapper[4765]: I1210 06:48:57.921873 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:57Z","lastTransitionTime":"2025-12-10T06:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.023985 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.024037 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.024053 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.024073 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.024124 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:58Z","lastTransitionTime":"2025-12-10T06:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.126465 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.126504 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.126512 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.126526 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.126538 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:58Z","lastTransitionTime":"2025-12-10T06:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.228964 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.229001 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.229009 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.229023 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.229032 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:58Z","lastTransitionTime":"2025-12-10T06:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.331866 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.331908 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.331938 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.331953 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.331962 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:58Z","lastTransitionTime":"2025-12-10T06:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.435132 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.435180 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.435192 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.435210 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.435219 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:58Z","lastTransitionTime":"2025-12-10T06:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.537592 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.537637 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.537648 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.537663 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.537675 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:58Z","lastTransitionTime":"2025-12-10T06:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.588285 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:48:58 crc kubenswrapper[4765]: E1210 06:48:58.588472 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.588772 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:48:58 crc kubenswrapper[4765]: E1210 06:48:58.588871 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.639741 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.639788 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.639800 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.639817 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.639830 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:58Z","lastTransitionTime":"2025-12-10T06:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.742493 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.742526 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.742537 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.742551 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.742561 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:58Z","lastTransitionTime":"2025-12-10T06:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.845406 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.845442 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.845452 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.845469 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.845481 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:58Z","lastTransitionTime":"2025-12-10T06:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.948827 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.948868 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.948877 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.948894 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:58 crc kubenswrapper[4765]: I1210 06:48:58.948903 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:58Z","lastTransitionTime":"2025-12-10T06:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.051499 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.051542 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.051552 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.051567 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.051576 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:59Z","lastTransitionTime":"2025-12-10T06:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.154714 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.154776 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.154787 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.154801 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.154812 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:59Z","lastTransitionTime":"2025-12-10T06:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.257013 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.257073 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.257097 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.257113 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.257124 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:59Z","lastTransitionTime":"2025-12-10T06:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.359315 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.359370 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.359382 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.359399 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.359410 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:59Z","lastTransitionTime":"2025-12-10T06:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.461654 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.461707 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.461715 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.461730 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.461738 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:59Z","lastTransitionTime":"2025-12-10T06:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.564548 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.564580 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.564589 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.564603 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.564611 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:59Z","lastTransitionTime":"2025-12-10T06:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.588791 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.588829 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:48:59 crc kubenswrapper[4765]: E1210 06:48:59.588893 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:48:59 crc kubenswrapper[4765]: E1210 06:48:59.588956 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.666544 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.666585 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.666593 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.666609 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.666622 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:59Z","lastTransitionTime":"2025-12-10T06:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.769063 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.769117 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.769128 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.769143 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.769153 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:59Z","lastTransitionTime":"2025-12-10T06:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.871952 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.871988 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.871996 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.872011 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.872023 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:59Z","lastTransitionTime":"2025-12-10T06:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.974333 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.974380 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.974393 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.974411 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:48:59 crc kubenswrapper[4765]: I1210 06:48:59.974423 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:48:59Z","lastTransitionTime":"2025-12-10T06:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.076756 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.076792 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.076800 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.076813 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.076824 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:00Z","lastTransitionTime":"2025-12-10T06:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.179125 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.179160 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.179169 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.179185 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.179200 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:00Z","lastTransitionTime":"2025-12-10T06:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.281837 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.281874 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.281884 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.281898 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.281908 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:00Z","lastTransitionTime":"2025-12-10T06:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.384299 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.384337 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.384347 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.384363 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.384373 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:00Z","lastTransitionTime":"2025-12-10T06:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.488279 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.488323 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.488336 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.488352 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.488367 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:00Z","lastTransitionTime":"2025-12-10T06:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.588800 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.589149 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:00 crc kubenswrapper[4765]: E1210 06:49:00.589235 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.589543 4765 scope.go:117] "RemoveContainer" containerID="5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002" Dec 10 06:49:00 crc kubenswrapper[4765]: E1210 06:49:00.590304 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.590348 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:00 crc kubenswrapper[4765]: E1210 06:49:00.590356 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.590371 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.590561 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.590588 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.590664 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:00Z","lastTransitionTime":"2025-12-10T06:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.607532 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46
af0c9a0773d08a81eaa1b002\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:47Z\\\",\\\"message\\\":\\\"r pod on switch crc\\\\nI1210 06:48:47.490404 6445 services_controller.go:356] Processing sync for service openshift-machine-api/machine-api-operator-webhook for network=default\\\\nI1210 06:48:47.490415 6445 services_controller.go:434] Service openshift-machine-api/machine-api-operator-webhook retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-api-operator-webhook openshift-machine-api 9a52fff0-1401-419f-a09a-83e2d591f2ef 4461 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-api-operator-webhook] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:machine-api-operator-webhook-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e93b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{1 0 webhook-server},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{api: clusterapi,k8s-app: controller,},Clust\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.618622 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.631890 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.642879 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a220b577-ceff-4d65-ae22-8b9141158ad3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58f060c79f0e4210821a5b5485b559551df6b2652557a3d09c13c02aad0f62e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://643ed52b9915c3dd9ee250f2a7ab4b1b6edf81ad2bf22195f11f2100c7f13003\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f12bd7ae1e74d43db676788d30203875f6682194c96ff3db0aed89adc94ec963\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.656899 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.669420 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.678811 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.691247 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.694260 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.694294 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.694324 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.694345 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.694358 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:00Z","lastTransitionTime":"2025-12-10T06:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.702887 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.715145 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.724176 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.734446 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manag
er-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.744521 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.752463 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.761818 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.770710 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 
06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.786745 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:00Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.796429 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 
06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.796465 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.796482 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.796502 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.796513 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:00Z","lastTransitionTime":"2025-12-10T06:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.898636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.898679 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.898692 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.898710 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:00 crc kubenswrapper[4765]: I1210 06:49:00.898720 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:00Z","lastTransitionTime":"2025-12-10T06:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.000722 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.000760 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.000772 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.000788 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.000799 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:01Z","lastTransitionTime":"2025-12-10T06:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.103330 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.103369 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.103380 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.103395 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.103406 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:01Z","lastTransitionTime":"2025-12-10T06:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.205916 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.205967 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.205981 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.205999 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.206012 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:01Z","lastTransitionTime":"2025-12-10T06:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.308229 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.308262 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.308270 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.308288 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.308299 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:01Z","lastTransitionTime":"2025-12-10T06:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.410397 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.410438 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.410448 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.410465 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.410476 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:01Z","lastTransitionTime":"2025-12-10T06:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.513315 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.513349 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.513360 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.513374 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.513382 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:01Z","lastTransitionTime":"2025-12-10T06:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.589001 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:01 crc kubenswrapper[4765]: E1210 06:49:01.589144 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.589387 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:01 crc kubenswrapper[4765]: E1210 06:49:01.589554 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.615407 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.615442 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.615456 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.615472 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.615483 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:01Z","lastTransitionTime":"2025-12-10T06:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.717786 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.717826 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.717837 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.717855 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.717867 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:01Z","lastTransitionTime":"2025-12-10T06:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.819964 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.819991 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.819999 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.820011 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.820021 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:01Z","lastTransitionTime":"2025-12-10T06:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.921959 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.921986 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.921994 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.922011 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:01 crc kubenswrapper[4765]: I1210 06:49:01.922028 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:01Z","lastTransitionTime":"2025-12-10T06:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.024465 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.024518 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.024530 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.024547 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.024559 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:02Z","lastTransitionTime":"2025-12-10T06:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.127350 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.127384 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.127394 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.127410 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.127419 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:02Z","lastTransitionTime":"2025-12-10T06:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.231065 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.231130 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.231143 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.231158 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.231168 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:02Z","lastTransitionTime":"2025-12-10T06:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.333628 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.333893 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.333981 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.334068 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.334166 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:02Z","lastTransitionTime":"2025-12-10T06:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.436560 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.436864 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.436968 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.437056 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.437155 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:02Z","lastTransitionTime":"2025-12-10T06:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.539267 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.539797 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.539905 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.540033 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.540175 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:02Z","lastTransitionTime":"2025-12-10T06:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.588318 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.588363 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:02 crc kubenswrapper[4765]: E1210 06:49:02.588457 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:02 crc kubenswrapper[4765]: E1210 06:49:02.588513 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.642859 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.642893 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.642920 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.642934 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.642945 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:02Z","lastTransitionTime":"2025-12-10T06:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.744995 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.745047 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.745056 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.745070 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.745098 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:02Z","lastTransitionTime":"2025-12-10T06:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.847541 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.847896 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.848048 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.848264 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.848408 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:02Z","lastTransitionTime":"2025-12-10T06:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.950569 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.950626 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.950636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.950655 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:02 crc kubenswrapper[4765]: I1210 06:49:02.950666 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:02Z","lastTransitionTime":"2025-12-10T06:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.052543 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.052598 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.052607 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.052622 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.052631 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.128958 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.129004 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.129014 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.129034 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.129047 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: E1210 06:49:03.144280 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:03Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.147879 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.147914 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.147924 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.147938 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.147948 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: E1210 06:49:03.159330 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:03Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.162820 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.162873 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.162884 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.162901 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.162910 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: E1210 06:49:03.176174 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:03Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.180438 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.180492 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.180504 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.180525 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.180538 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: E1210 06:49:03.193125 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:03Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.196929 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.197019 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.197038 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.197065 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.197116 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: E1210 06:49:03.210753 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:03Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:03 crc kubenswrapper[4765]: E1210 06:49:03.210928 4765 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.212764 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.212816 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.212836 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.212861 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.212876 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.315842 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.315896 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.315908 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.315927 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.315939 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.418844 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.418910 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.418923 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.418955 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.418976 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.521238 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.521279 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.521287 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.521300 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.521311 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.588277 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.588339 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:03 crc kubenswrapper[4765]: E1210 06:49:03.588429 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:03 crc kubenswrapper[4765]: E1210 06:49:03.588501 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.623586 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.623627 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.623646 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.623668 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.623679 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.727375 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.727420 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.727429 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.727447 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:03 crc kubenswrapper[4765]: I1210 06:49:03.727458 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:03Z","lastTransitionTime":"2025-12-10T06:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 10 06:49:04 crc kubenswrapper[4765]: I1210 06:49:04.589076 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 06:49:04 crc kubenswrapper[4765]: I1210 06:49:04.589232 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:49:04 crc kubenswrapper[4765]: E1210 06:49:04.589322 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 06:49:04 crc kubenswrapper[4765]: E1210 06:49:04.589417 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 06:49:05 crc kubenswrapper[4765]: I1210 06:49:05.588691 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld"
Dec 10 06:49:05 crc kubenswrapper[4765]: I1210 06:49:05.588728 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:49:05 crc kubenswrapper[4765]: E1210 06:49:05.588797 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9"
Dec 10 06:49:05 crc kubenswrapper[4765]: E1210 06:49:05.588853 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 06:49:05 crc kubenswrapper[4765]: I1210 06:49:05.905153 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld"
Dec 10 06:49:05 crc kubenswrapper[4765]: E1210 06:49:05.905292 4765 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 10 06:49:05 crc kubenswrapper[4765]: E1210 06:49:05.905377 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs podName:efb71311-50ec-4765-8caf-6f2e02b8dce9 nodeName:}" failed. No retries permitted until 2025-12-10 06:49:37.905358189 +0000 UTC m=+97.632023505 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs") pod "network-metrics-daemon-k9sld" (UID: "efb71311-50ec-4765-8caf-6f2e02b8dce9") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 10 06:49:06 crc kubenswrapper[4765]: I1210 06:49:06.588500 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:49:06 crc kubenswrapper[4765]: I1210 06:49:06.588521 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 06:49:06 crc kubenswrapper[4765]: E1210 06:49:06.588630 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 06:49:06 crc kubenswrapper[4765]: E1210 06:49:06.588737 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.588735 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld"
Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.588786 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:49:07 crc kubenswrapper[4765]: E1210 06:49:07.588852 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9"
Dec 10 06:49:07 crc kubenswrapper[4765]: E1210 06:49:07.588922 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.907774 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r78vd_d5227381-9852-49ce-96f1-220c42aab12a/kube-multus/0.log"
Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.907828 4765 generic.go:334] "Generic (PLEG): container finished" podID="d5227381-9852-49ce-96f1-220c42aab12a" containerID="ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce" exitCode=1
Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.907861 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r78vd" event={"ID":"d5227381-9852-49ce-96f1-220c42aab12a","Type":"ContainerDied","Data":"ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce"}
Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.908276 4765 scope.go:117] "RemoveContainer" containerID="ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce"
Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.924441 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:07Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.931050 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.931160 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:07 crc 
kubenswrapper[4765]: I1210 06:49:07.931181 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.931196 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.931208 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:07Z","lastTransitionTime":"2025-12-10T06:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.939249 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:07Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.950475 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Dis
abled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:07Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.963585 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:07Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.973920 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:07Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:07 crc kubenswrapper[4765]: I1210 06:49:07.991169 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:47Z\\\",\\\"message\\\":\\\"r pod on switch crc\\\\nI1210 06:48:47.490404 6445 services_controller.go:356] Processing sync for service openshift-machine-api/machine-api-operator-webhook for network=default\\\\nI1210 06:48:47.490415 6445 services_controller.go:434] Service openshift-machine-api/machine-api-operator-webhook retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-api-operator-webhook openshift-machine-api 9a52fff0-1401-419f-a09a-83e2d591f2ef 4461 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-api-operator-webhook] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:machine-api-operator-webhook-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e93b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{1 0 webhook-server},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{api: clusterapi,k8s-app: 
controller,},Clust\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:07Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.002916 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.016027 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.026662 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a220b577-ceff-4d65-ae22-8b9141158ad3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58f060c79f0e4210821a5b5485b559551df6b2652557a3d09c13c02aad0f62e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://643ed52b9915c3dd9ee250f2a7ab4b1b6edf81ad2bf22195f11f2100c7f13003\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f12bd7ae1e74d43db676788d30203875f6682194c96ff3db0aed89adc94ec963\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.033183 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.033218 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.033229 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.033245 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.033258 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:08Z","lastTransitionTime":"2025-12-10T06:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.037878 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.047908 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 
06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.059022 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.067874 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.079953 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:07Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:49:07Z\\\",\\\"message\\\":\\\"2025-12-10T06:48:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117\\\\n2025-12-10T06:48:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117 to /host/opt/cni/bin/\\\\n2025-12-10T06:48:22Z [verbose] multus-daemon started\\\\n2025-12-10T06:48:22Z [verbose] Readiness Indicator file check\\\\n2025-12-10T06:49:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.089356 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.100204 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.112792 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.135533 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.135574 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.135583 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.135600 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.135610 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:08Z","lastTransitionTime":"2025-12-10T06:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.240734 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.240769 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.240778 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.240792 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.240803 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:08Z","lastTransitionTime":"2025-12-10T06:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.343065 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.343120 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.343129 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.343144 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.343154 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:08Z","lastTransitionTime":"2025-12-10T06:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.445462 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.445508 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.445519 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.445535 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.445545 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:08Z","lastTransitionTime":"2025-12-10T06:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.547897 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.547939 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.547949 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.547966 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.547977 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:08Z","lastTransitionTime":"2025-12-10T06:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.588467 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.588502 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:08 crc kubenswrapper[4765]: E1210 06:49:08.588614 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:08 crc kubenswrapper[4765]: E1210 06:49:08.588685 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.650613 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.650649 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.650660 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.650680 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.650692 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:08Z","lastTransitionTime":"2025-12-10T06:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.753157 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.753196 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.753213 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.753232 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.753244 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:08Z","lastTransitionTime":"2025-12-10T06:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.856079 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.856135 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.856144 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.856157 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.856173 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:08Z","lastTransitionTime":"2025-12-10T06:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.912486 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r78vd_d5227381-9852-49ce-96f1-220c42aab12a/kube-multus/0.log" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.912529 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r78vd" event={"ID":"d5227381-9852-49ce-96f1-220c42aab12a","Type":"ContainerStarted","Data":"1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.925177 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.937472 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.951101 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:49:07Z\\\",\\\"message\\\":\\\"2025-12-10T06:48:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117\\\\n2025-12-10T06:48:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117 to /host/opt/cni/bin/\\\\n2025-12-10T06:48:22Z [verbose] multus-daemon started\\\\n2025-12-10T06:48:22Z [verbose] Readiness Indicator file check\\\\n2025-12-10T06:49:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.958740 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.958784 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.958796 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.958815 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.958828 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:08Z","lastTransitionTime":"2025-12-10T06:49:08Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.961953 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-
12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.974940 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed0
8287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:08 crc kubenswrapper[4765]: I1210 06:49:08.993967 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:08Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.005799 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.018126 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.029280 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 
06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.043362 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.053678 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.060604 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.060634 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.060643 4765 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.060656 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.060665 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:09Z","lastTransitionTime":"2025-12-10T06:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.068030 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\
\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.080820 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a220b577-ceff-4d65-ae22-8b9141158ad3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58f060c79f0e4210821a5b5485b559551df6b2652557a3d09c13c02aad0f62e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://643ed52b9915c3dd9ee250f2a7ab4b1b6edf81ad2bf22195f11f2100c7f13003\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f12bd7ae1e74d43db676788d30203875f6682194c96ff3db0aed89adc94ec963\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.093255 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.105159 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.115685 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.132948 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:47Z\\\",\\\"message\\\":\\\"r pod on switch crc\\\\nI1210 06:48:47.490404 6445 services_controller.go:356] Processing sync for service openshift-machine-api/machine-api-operator-webhook for network=default\\\\nI1210 06:48:47.490415 6445 services_controller.go:434] Service openshift-machine-api/machine-api-operator-webhook retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-api-operator-webhook openshift-machine-api 9a52fff0-1401-419f-a09a-83e2d591f2ef 4461 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-api-operator-webhook] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:machine-api-operator-webhook-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e93b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{1 0 webhook-server},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{api: clusterapi,k8s-app: 
controller,},Clust\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.162589 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.162626 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.162636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.162652 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.162664 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:09Z","lastTransitionTime":"2025-12-10T06:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.264671 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.264717 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.264728 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.264744 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.264756 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:09Z","lastTransitionTime":"2025-12-10T06:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.366741 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.366815 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.366830 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.366848 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.366861 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:09Z","lastTransitionTime":"2025-12-10T06:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.469173 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.469214 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.469223 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.469241 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
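The status-patch failure that opens this stretch of the log ("x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:09Z is after 2025-08-24T17:21:41Z", repeated for every pod below) is a plain validity-window check: the serving certificate for the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743/pod lapsed in August, so every TLS handshake fails. A minimal Go sketch of the comparison behind that message, assuming a PEM-encoded certificate on disk (the file name is hypothetical; the webhook pod mounts its real cert under /etc/webhook-cert/):

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // Hypothetical local copy of the webhook serving certificate.
        pemBytes, err := os.ReadFile("webhook-cert.pem")
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(pemBytes)
        if block == nil {
            panic("no PEM block in webhook-cert.pem")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        now := time.Now()
        // The same NotBefore/NotAfter window check that chain
        // verification applies when it rejects an expired cert.
        if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
            fmt.Printf("certificate has expired or is not yet valid: current time %s is after %s\n",
                now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        }
    }

Until that certificate is rotated, every status patch the kubelet sends through this webhook will keep failing with the same error.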
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.469250 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:09Z","lastTransitionTime":"2025-12-10T06:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.571693 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.571731 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.571742 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.571756 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.571767 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:09Z","lastTransitionTime":"2025-12-10T06:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.588174 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.588189 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld"
Dec 10 06:49:09 crc kubenswrapper[4765]: E1210 06:49:09.588330 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
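All of the NodeNotReady churn above reduces to a single condition: the kubelet finds no CNI config in /etc/kubernetes/cni/net.d/, because the ovnkube-controller container that would write it is crash-looping. The readiness signal is essentially a scan of that directory for usable network configs; a simplified sketch follows (the real logic lives in the kubelet's CRI and libcni plumbing, and the extension list mirrors what libcni accepts, so treat this as an approximation, not the exact implementation):

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // hasCNIConfig reports whether any CNI network config exists in dir,
    // loosely mirroring the file extensions libcni considers.
    func hasCNIConfig(dir string) bool {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                return true
            }
        }
        return false
    }

    func main() {
        if !hasCNIConfig("/etc/kubernetes/cni/net.d") {
            fmt.Println("no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?")
        }
    }

Once ovnkube-controller stays up long enough to drop its config file into that directory, the runtime should report NetworkReady=true and the node should flip back to Ready.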
Dec 10 06:49:09 crc kubenswrapper[4765]: E1210 06:49:09.588382 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.674413 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.674444 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.674455 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.674505 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.674519 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:09Z","lastTransitionTime":"2025-12-10T06:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.777146 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.777365 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.777464 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.777536 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
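For context on the "back-off 20s restarting failed container=ovnkube-controller" waiting state recorded earlier (reason CrashLoopBackOff, restartCount 2): the kubelet restarts a crash-looping container with an exponential delay, by default starting at 10s and doubling per restart up to a 5 minute cap, giving 10s, 20s, 40s, and so on; the 20s here lines up with this container's second restart. A toy sketch of that schedule (the base and cap are kubelet defaults as commonly documented, not values read from this log):

    package main

    import (
        "fmt"
        "time"
    )

    // crashLoopDelay approximates the kubelet's restart back-off:
    // the first restart waits base, each later one doubles it,
    // capped at max.
    func crashLoopDelay(restart int, base, max time.Duration) time.Duration {
        d := base
        for i := 1; i < restart; i++ {
            d *= 2
            if d >= max {
                return max
            }
        }
        return d
    }

    func main() {
        for r := 1; r <= 6; r++ {
            fmt.Printf("restart %d -> back-off %s\n", r, crashLoopDelay(r, 10*time.Second, 5*time.Minute))
        }
    }

On this schedule the delay saturates at the cap after a handful of failures, which is why a persistently crashing container settles into infrequent retry attempts rather than restarting continuously.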
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.777597 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:09Z","lastTransitionTime":"2025-12-10T06:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.882686 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.882960 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.883040 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.883153 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.883220 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:09Z","lastTransitionTime":"2025-12-10T06:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.985241 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.985273 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.985302 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.985318 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:09 crc kubenswrapper[4765]: I1210 06:49:09.985329 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:09Z","lastTransitionTime":"2025-12-10T06:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.087492 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.087529 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.087539 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.087554 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.087565 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:10Z","lastTransitionTime":"2025-12-10T06:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.189719 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.189771 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.189780 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.189794 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.189803 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:10Z","lastTransitionTime":"2025-12-10T06:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.291907 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.291979 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.292011 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.292027 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.292039 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:10Z","lastTransitionTime":"2025-12-10T06:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.393918 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.393959 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.393969 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.393985 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.393996 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:10Z","lastTransitionTime":"2025-12-10T06:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.496026 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.496069 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.496081 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.496110 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.496120 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:10Z","lastTransitionTime":"2025-12-10T06:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.588502 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.588508 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 06:49:10 crc kubenswrapper[4765]: E1210 06:49:10.588651 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 06:49:10 crc kubenswrapper[4765]: E1210 06:49:10.588814 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.597637 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.597691 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.597700 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.597712 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.597721 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:10Z","lastTransitionTime":"2025-12-10T06:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.602998 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee
1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.617114 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.626426 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.637322 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.647539 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.661221 4765 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.672186 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.689574 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:47Z\\\",\\\"message\\\":\\\"r pod on switch crc\\\\nI1210 06:48:47.490404 6445 services_controller.go:356] Processing sync for service openshift-machine-api/machine-api-operator-webhook for network=default\\\\nI1210 06:48:47.490415 6445 services_controller.go:434] Service openshift-machine-api/machine-api-operator-webhook retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-api-operator-webhook openshift-machine-api 9a52fff0-1401-419f-a09a-83e2d591f2ef 4461 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-api-operator-webhook] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:machine-api-operator-webhook-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e93b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{1 0 webhook-server},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{api: clusterapi,k8s-app: controller,},Clust\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.699937 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.699977 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.699987 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.700002 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.700012 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:10Z","lastTransitionTime":"2025-12-10T06:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.701272 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.714444 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.725022 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a220b577-ceff-4d65-ae22-8b9141158ad3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58f060c79f0e4210821a5b5485b559551df6b2652557a3d09c13c02aad0f62e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://643ed52b9915c3dd9ee250f2a7ab4b1b6edf81ad2bf22195f11f2100c7f13003\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f12bd7ae1e74d43db676788d30203875f6682194c96ff3db0aed89adc94ec963\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.736935 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.747840 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.759787 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.771513 4765 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.782992 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:49:07Z\\\",\\\"message\\\":\\\"2025-12-10T06:48:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117\\\\n2025-12-10T06:48:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117 to /host/opt/cni/bin/\\\\n2025-12-10T06:48:22Z [verbose] multus-daemon started\\\\n2025-12-10T06:48:22Z [verbose] Readiness Indicator file check\\\\n2025-12-10T06:49:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.794345 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:10Z is after 2025-08-24T17:21:41Z" Dec 10 
06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.801871 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.801903 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.801913 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.801929 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.801941 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:10Z","lastTransitionTime":"2025-12-10T06:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.904413 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.904466 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.904479 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.904497 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:10 crc kubenswrapper[4765]: I1210 06:49:10.904509 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:10Z","lastTransitionTime":"2025-12-10T06:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.006419 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.006469 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.006479 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.006497 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.006510 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:11Z","lastTransitionTime":"2025-12-10T06:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.108451 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.108497 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.108508 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.108528 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.108542 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:11Z","lastTransitionTime":"2025-12-10T06:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.210945 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.210985 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.210994 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.211009 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.211018 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:11Z","lastTransitionTime":"2025-12-10T06:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.312917 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.312947 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.312954 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.312966 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.312975 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:11Z","lastTransitionTime":"2025-12-10T06:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.414958 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.414996 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.415008 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.415023 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.415034 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:11Z","lastTransitionTime":"2025-12-10T06:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.516792 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.516843 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.516854 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.516873 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.516884 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:11Z","lastTransitionTime":"2025-12-10T06:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.588546 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.588569 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld"
Dec 10 06:49:11 crc kubenswrapper[4765]: E1210 06:49:11.588716 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 06:49:11 crc kubenswrapper[4765]: E1210 06:49:11.588851 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.620102 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.620158 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.620178 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.620204 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.620220 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:11Z","lastTransitionTime":"2025-12-10T06:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.722974 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.723017 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.723025 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.723043 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.723054 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:11Z","lastTransitionTime":"2025-12-10T06:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.826253 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.826319 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.826339 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.826391 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.826430 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:11Z","lastTransitionTime":"2025-12-10T06:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.929355 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.929387 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.929399 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.929415 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:11 crc kubenswrapper[4765]: I1210 06:49:11.929427 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:11Z","lastTransitionTime":"2025-12-10T06:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.031896 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.032201 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.032340 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.032438 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.032528 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:12Z","lastTransitionTime":"2025-12-10T06:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.135299 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.135600 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.135680 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.135780 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.135870 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:12Z","lastTransitionTime":"2025-12-10T06:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.238338 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.238377 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.238387 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.238402 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.238412 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:12Z","lastTransitionTime":"2025-12-10T06:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.340200 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.340243 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.340257 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.340274 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.340285 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:12Z","lastTransitionTime":"2025-12-10T06:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.442852 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.442898 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.442911 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.442927 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.442938 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:12Z","lastTransitionTime":"2025-12-10T06:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.545066 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.545148 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.545159 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.545181 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.545199 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:12Z","lastTransitionTime":"2025-12-10T06:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.589159 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.589184 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:12 crc kubenswrapper[4765]: E1210 06:49:12.589337 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:12 crc kubenswrapper[4765]: E1210 06:49:12.589499 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.647391 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.647429 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.647440 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.647456 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.647482 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:12Z","lastTransitionTime":"2025-12-10T06:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.750310 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.750346 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.750355 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.750369 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.750378 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:12Z","lastTransitionTime":"2025-12-10T06:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.852379 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.852416 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.852424 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.852471 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.852481 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:12Z","lastTransitionTime":"2025-12-10T06:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.955200 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.955254 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.955266 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.955285 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:12 crc kubenswrapper[4765]: I1210 06:49:12.955299 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:12Z","lastTransitionTime":"2025-12-10T06:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.058080 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.058153 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.058165 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.058184 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.058193 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.161701 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.161747 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.161757 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.161775 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.161787 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.263830 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.263874 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.263887 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.263907 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.263924 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.366303 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.366341 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.366352 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.366370 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.366382 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.468713 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.468743 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.468754 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.468769 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.468780 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.571173 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.571224 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.571237 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.571259 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.571270 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.588470 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.588514 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:13 crc kubenswrapper[4765]: E1210 06:49:13.588839 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:13 crc kubenswrapper[4765]: E1210 06:49:13.588928 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.589190 4765 scope.go:117] "RemoveContainer" containerID="5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.612355 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.612383 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.612393 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.612411 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.612422 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: E1210 06:49:13.625511 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:13Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.628685 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.628780 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.628842 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.628901 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.628956 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: E1210 06:49:13.642152 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:13Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.645697 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.646046 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.646065 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.646109 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.646126 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: E1210 06:49:13.659040 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:13Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.663422 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.663468 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.663479 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.663497 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.663510 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: E1210 06:49:13.675605 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:13Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.678902 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.678941 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.678952 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.678970 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.678981 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: E1210 06:49:13.690958 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:13Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:13 crc kubenswrapper[4765]: E1210 06:49:13.691080 4765 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.692781 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.692818 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.692829 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.692845 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.692856 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.795532 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.795585 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.795598 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.795651 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.795662 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.897793 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.897839 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.897852 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.897869 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.897881 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:13Z","lastTransitionTime":"2025-12-10T06:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.928928 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/2.log" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.931361 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839"} Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.931844 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.945042 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:13Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.955347 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:13Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.973473 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.1
68.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:13Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.982369 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:13Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:13 crc kubenswrapper[4765]: I1210 06:49:13.996519 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:13Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.002161 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.002195 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.002205 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.002221 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.002230 4765 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:14Z","lastTransitionTime":"2025-12-10T06:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.010072 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a220b577-ceff-4d65-ae22-8b9141158ad3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58f060c79f0e4210821a5b5485b559551df6b2652557a3d09c13c02aad0f62e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://643ed52b9915c3dd9ee250f2a7ab4b1b6edf81ad2bf22195f11f2100c7f13003\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f12bd7ae1e74d43db676788d30203875f6682194c96ff3db0aed89adc94ec963\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.022184 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.033690 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.044482 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.065813 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:47Z\\\",\\\"message\\\":\\\"r pod on switch crc\\\\nI1210 06:48:47.490404 6445 services_controller.go:356] Processing sync for service openshift-machine-api/machine-api-operator-webhook for network=default\\\\nI1210 06:48:47.490415 6445 services_controller.go:434] Service openshift-machine-api/machine-api-operator-webhook retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-api-operator-webhook openshift-machine-api 9a52fff0-1401-419f-a09a-83e2d591f2ef 4461 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-api-operator-webhook] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:machine-api-operator-webhook-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e93b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{1 0 webhook-server},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{api: clusterapi,k8s-app: 
controller,},Clust\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:49:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatus
es\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.083961 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.098078 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.104944 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.104991 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.105000 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.105017 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.105025 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:14Z","lastTransitionTime":"2025-12-10T06:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.111426 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:49:07Z\\\",\\\"message\\\":\\\"2025-12-10T06:48:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117\\\\n2025-12-10T06:48:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117 to /host/opt/cni/bin/\\\\n2025-12-10T06:48:22Z [verbose] multus-daemon started\\\\n2025-12-10T06:48:22Z [verbose] Readiness Indicator file check\\\\n2025-12-10T06:49:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.130955 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 
06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.147380 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.169321 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.188476 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.207577 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.207626 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.207638 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.207662 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.207675 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:14Z","lastTransitionTime":"2025-12-10T06:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.309875 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.309914 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.309923 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.309957 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.309967 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:14Z","lastTransitionTime":"2025-12-10T06:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.412403 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.412445 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.412462 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.412478 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.412491 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:14Z","lastTransitionTime":"2025-12-10T06:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.515042 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.515117 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.515129 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.515144 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.515156 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:14Z","lastTransitionTime":"2025-12-10T06:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.588049 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.588136 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:14 crc kubenswrapper[4765]: E1210 06:49:14.588203 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:14 crc kubenswrapper[4765]: E1210 06:49:14.588245 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.617145 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.617188 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.617199 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.617216 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.617227 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:14Z","lastTransitionTime":"2025-12-10T06:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.718764 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.718805 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.718845 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.718860 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.718871 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:14Z","lastTransitionTime":"2025-12-10T06:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.821617 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.821654 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.821663 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.821677 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.821689 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:14Z","lastTransitionTime":"2025-12-10T06:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.924015 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.924069 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.924111 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.924132 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.924144 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:14Z","lastTransitionTime":"2025-12-10T06:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.935454 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/3.log" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.936191 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/2.log" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.938995 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" exitCode=1 Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.939042 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839"} Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.939098 4765 scope.go:117] "RemoveContainer" containerID="5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.940037 4765 scope.go:117] "RemoveContainer" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" Dec 10 06:49:14 crc kubenswrapper[4765]: E1210 06:49:14.940212 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.953497 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.964875 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.977529 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:49:07Z\\\",\\\"message\\\":\\\"2025-12-10T06:48:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117\\\\n2025-12-10T06:48:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117 to /host/opt/cni/bin/\\\\n2025-12-10T06:48:22Z [verbose] multus-daemon started\\\\n2025-12-10T06:48:22Z [verbose] Readiness Indicator file check\\\\n2025-12-10T06:49:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.986703 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 
06:49:14 crc kubenswrapper[4765]: I1210 06:49:14.997047 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:14Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.008742 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.017295 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.027800 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.027849 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.027861 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.027879 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.027893 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:15Z","lastTransitionTime":"2025-12-10T06:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.028654 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastSta
te\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.039105 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea17722
5c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.053397 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Compl
eted\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.071747 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a4bb43623b6336e82664042e771350122424e0
14c908f0ec1f938d90ea2839\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d0588a8a5df076c4734210346e2c05cac0b0f46af0c9a0773d08a81eaa1b002\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:48:47Z\\\",\\\"message\\\":\\\"r pod on switch crc\\\\nI1210 06:48:47.490404 6445 services_controller.go:356] Processing sync for service openshift-machine-api/machine-api-operator-webhook for network=default\\\\nI1210 06:48:47.490415 6445 services_controller.go:434] Service openshift-machine-api/machine-api-operator-webhook retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-api-operator-webhook openshift-machine-api 9a52fff0-1401-419f-a09a-83e2d591f2ef 4461 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-api-operator-webhook] map[capability.openshift.io/name:MachineAPI exclude.release.openshift.io/internal-openshift-hosted:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:machine-api-operator-webhook-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00768e93b \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{1 0 webhook-server},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{api: clusterapi,k8s-app: controller,},Clust\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:49:14Z\\\",\\\"message\\\":\\\"I1210 06:49:14.559918 6848 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-5wj7r after 0 failed attempt(s)\\\\nI1210 06:49:14.559923 6848 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-dcsjq\\\\nI1210 06:49:14.559830 6848 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1210 06:49:14.559818 6848 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1210 06:49:14.559928 6848 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-r78vd\\\\nI1210 06:49:14.559935 6848 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nF1210 06:49:14.559929 6848 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer 
because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:49:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.082238 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.094654 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.105793 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a220b577-ceff-4d65-ae22-8b9141158ad3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58f060c79f0e4210821a5b5485b559551df6b2652557a3d09c13c02aad0f62e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://643ed52b9915c3dd9ee250f2a7ab4b1b6edf81ad2bf22195f11f2100c7f13003\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f12bd7ae1e74d43db676788d30203875f6682194c96ff3db0aed89adc94ec963\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.117574 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.129423 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.130330 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.130362 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.130369 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.130384 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.130392 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:15Z","lastTransitionTime":"2025-12-10T06:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.141573 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.233226 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.233271 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.233283 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.233300 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.233312 4765 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:15Z","lastTransitionTime":"2025-12-10T06:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.339748 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.340175 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.340191 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.340210 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.340222 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:15Z","lastTransitionTime":"2025-12-10T06:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.442312 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.442375 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.442387 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.442408 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.442420 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:15Z","lastTransitionTime":"2025-12-10T06:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.544754 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.545735 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.545762 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.545780 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.545790 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:15Z","lastTransitionTime":"2025-12-10T06:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.588146 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.588162 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:15 crc kubenswrapper[4765]: E1210 06:49:15.588287 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:15 crc kubenswrapper[4765]: E1210 06:49:15.588440 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.648708 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.648761 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.648777 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.648795 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.648808 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:15Z","lastTransitionTime":"2025-12-10T06:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.751676 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.751716 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.751726 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.751741 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.751750 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:15Z","lastTransitionTime":"2025-12-10T06:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.854670 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.854726 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.854737 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.854753 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.854764 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:15Z","lastTransitionTime":"2025-12-10T06:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.943209 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/3.log" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.947413 4765 scope.go:117] "RemoveContainer" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" Dec 10 06:49:15 crc kubenswrapper[4765]: E1210 06:49:15.947567 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.956608 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.956684 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.956695 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.956739 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.956749 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:15Z","lastTransitionTime":"2025-12-10T06:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.958288 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.969514 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.982471 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:15 crc kubenswrapper[4765]: I1210 06:49:15.994960 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:15Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.006366 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\
\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.016825 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"
hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.027237 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.037502 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.055669 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:49:14Z\\\",\\\"message\\\":\\\"I1210 06:49:14.559918 6848 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-5wj7r after 0 failed attempt(s)\\\\nI1210 06:49:14.559923 6848 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-dcsjq\\\\nI1210 06:49:14.559830 6848 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1210 06:49:14.559818 6848 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1210 06:49:14.559928 6848 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-r78vd\\\\nI1210 06:49:14.559935 6848 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nF1210 06:49:14.559929 6848 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: 
fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:49:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.059543 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.059564 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.059573 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.059586 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.059594 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:16Z","lastTransitionTime":"2025-12-10T06:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.066960 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.080234 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.089645 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a220b577-ceff-4d65-ae22-8b9141158ad3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58f060c79f0e4210821a5b5485b559551df6b2652557a3d09c13c02aad0f62e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://643ed52b9915c3dd9ee250f2a7ab4b1b6edf81ad2bf22195f11f2100c7f13003\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f12bd7ae1e74d43db676788d30203875f6682194c96ff3db0aed89adc94ec963\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.098885 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.108183 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.120265 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.133514 4765 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.146361 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:49:07Z\\\",\\\"message\\\":\\\"2025-12-10T06:48:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117\\\\n2025-12-10T06:48:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117 to /host/opt/cni/bin/\\\\n2025-12-10T06:48:22Z [verbose] multus-daemon started\\\\n2025-12-10T06:48:22Z [verbose] Readiness Indicator file check\\\\n2025-12-10T06:49:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:16Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.162134 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.162175 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.162183 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.162197 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.162206 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:16Z","lastTransitionTime":"2025-12-10T06:49:16Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.265114 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.265155 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.265170 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.265187 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.265199 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:16Z","lastTransitionTime":"2025-12-10T06:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.367626 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.367669 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.367678 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.367696 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.367708 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:16Z","lastTransitionTime":"2025-12-10T06:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.470335 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.470379 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.470393 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.470410 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.470423 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:16Z","lastTransitionTime":"2025-12-10T06:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.572310 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.572347 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.572359 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.572381 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.572393 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:16Z","lastTransitionTime":"2025-12-10T06:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.588921 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.588963 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:16 crc kubenswrapper[4765]: E1210 06:49:16.589103 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:16 crc kubenswrapper[4765]: E1210 06:49:16.589325 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.675794 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.676042 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.676056 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.676076 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.676110 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:16Z","lastTransitionTime":"2025-12-10T06:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.778267 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.778307 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.778318 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.778334 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.778346 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:16Z","lastTransitionTime":"2025-12-10T06:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.881143 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.881188 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.881207 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.881227 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.881246 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:16Z","lastTransitionTime":"2025-12-10T06:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.986800 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.986862 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.986874 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.986891 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:16 crc kubenswrapper[4765]: I1210 06:49:16.986901 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:16Z","lastTransitionTime":"2025-12-10T06:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.089314 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.089425 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.089441 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.089456 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.089466 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:17Z","lastTransitionTime":"2025-12-10T06:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.192030 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.192057 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.192065 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.192077 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.192101 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:17Z","lastTransitionTime":"2025-12-10T06:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.294184 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.294239 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.294255 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.294277 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.294288 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:17Z","lastTransitionTime":"2025-12-10T06:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.396760 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.396805 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.396815 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.396830 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.396842 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:17Z","lastTransitionTime":"2025-12-10T06:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.499415 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.499457 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.499468 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.499485 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.499496 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:17Z","lastTransitionTime":"2025-12-10T06:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.588131 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.588248 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:17 crc kubenswrapper[4765]: E1210 06:49:17.588380 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:17 crc kubenswrapper[4765]: E1210 06:49:17.588675 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.599803 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.602145 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.602178 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.602190 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.602205 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.602215 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:17Z","lastTransitionTime":"2025-12-10T06:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.704463 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.704495 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.704503 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.704517 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.704526 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:17Z","lastTransitionTime":"2025-12-10T06:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.806405 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.806438 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.806461 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.806473 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.806482 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:17Z","lastTransitionTime":"2025-12-10T06:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.908284 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.908322 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.908333 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.908348 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:17 crc kubenswrapper[4765]: I1210 06:49:17.908357 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:17Z","lastTransitionTime":"2025-12-10T06:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.010660 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.010714 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.010725 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.010742 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.010753 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:18Z","lastTransitionTime":"2025-12-10T06:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.112767 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.112840 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.112851 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.112869 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.112881 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:18Z","lastTransitionTime":"2025-12-10T06:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.215022 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.215077 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.215105 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.215122 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.215134 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:18Z","lastTransitionTime":"2025-12-10T06:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.317560 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.317599 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.317612 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.317630 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.317642 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:18Z","lastTransitionTime":"2025-12-10T06:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.419702 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.419753 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.419766 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.419784 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.419796 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:18Z","lastTransitionTime":"2025-12-10T06:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.522931 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.522978 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.522989 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.523004 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.523014 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:18Z","lastTransitionTime":"2025-12-10T06:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.588042 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.588126 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 06:49:18 crc kubenswrapper[4765]: E1210 06:49:18.588181 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 06:49:18 crc kubenswrapper[4765]: E1210 06:49:18.588288 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.625672 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.625713 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.625723 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.625735 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.625744 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:18Z","lastTransitionTime":"2025-12-10T06:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.728538 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.728583 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.728594 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.728611 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.728622 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:18Z","lastTransitionTime":"2025-12-10T06:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.830631 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.830675 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.830689 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.830705 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.830716 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:18Z","lastTransitionTime":"2025-12-10T06:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.932959 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.932996 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.933005 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.933019 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:18 crc kubenswrapper[4765]: I1210 06:49:18.933029 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:18Z","lastTransitionTime":"2025-12-10T06:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.035072 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.035140 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.035151 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.035168 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.035177 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:19Z","lastTransitionTime":"2025-12-10T06:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.136721 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.136765 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.136773 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.136792 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.136802 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:19Z","lastTransitionTime":"2025-12-10T06:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.240577 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.240619 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.240636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.240653 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.240663 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:19Z","lastTransitionTime":"2025-12-10T06:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.343703 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.343739 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.343747 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.343762 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.343771 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:19Z","lastTransitionTime":"2025-12-10T06:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.446339 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.446403 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.446416 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.446432 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.446441 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:19Z","lastTransitionTime":"2025-12-10T06:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.549322 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.549360 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.549378 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.549395 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.549411 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:19Z","lastTransitionTime":"2025-12-10T06:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.588312 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.588322 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:49:19 crc kubenswrapper[4765]: E1210 06:49:19.588605 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 06:49:19 crc kubenswrapper[4765]: E1210 06:49:19.588475 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.651352 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.651395 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.651407 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.651423 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.651433 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:19Z","lastTransitionTime":"2025-12-10T06:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.753498 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.753534 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.753542 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.753556 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.753566 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:19Z","lastTransitionTime":"2025-12-10T06:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.855742 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.855781 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.856078 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.856150 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.856162 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:19Z","lastTransitionTime":"2025-12-10T06:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.957834 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.957893 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.957907 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.957927 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:19 crc kubenswrapper[4765]: I1210 06:49:19.957937 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:19Z","lastTransitionTime":"2025-12-10T06:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.060388 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.060427 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.060438 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.060453 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.060464 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:20Z","lastTransitionTime":"2025-12-10T06:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.162530 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.162578 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.162592 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.162612 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.162623 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:20Z","lastTransitionTime":"2025-12-10T06:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.265252 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.265294 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.265304 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.265319 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.265328 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:20Z","lastTransitionTime":"2025-12-10T06:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.367447 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.367498 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.367519 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.367535 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.367546 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:20Z","lastTransitionTime":"2025-12-10T06:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.470240 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.470343 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.470354 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.470369 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.470378 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:20Z","lastTransitionTime":"2025-12-10T06:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.573024 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.573060 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.573068 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.573108 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.573127 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:20Z","lastTransitionTime":"2025-12-10T06:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.588616 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:49:20 crc kubenswrapper[4765]: E1210 06:49:20.588744 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.588875 4765 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:20 crc kubenswrapper[4765]: E1210 06:49:20.588995 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.607441 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1daf7bf6-eab2-4204-9c70-52303876adb6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://968ac093e4144edc9a913cf3bf094e4e53bfbdb683d89f1295ff2e4e54a922c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://242b1af41a23126fd0f97ed46ddf935900ec5ec5b63b2ea02bb20cf21f6fb7ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b029fdc5e4db0b2caf4af8084e233bc8a2c12fefdbefdc
6129b2d6cfaabcef4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dfafc91dd81fc735fd85c1f08c2c79a25c1770b4d06055c9fd804f670ad6cf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd71f6dc010649075f75e94e68e9ef048371493e64376909153e8f3d902ac129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc204cb81c29a5156106504359f9e2e7aac7e25ccbbfafa885d90b04f36f38e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc204cb81c29a5156106504359f9e2e7aac7e25ccbbfafa885d90b04f36f38e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52cf96507e19767cc05ec2b4317b5333d654f11d72dcfdbafbc3a07559dc71c8\\\",\\\"image\\\":\\\"quay.io/openshif
t-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52cf96507e19767cc05ec2b4317b5333d654f11d72dcfdbafbc3a07559dc71c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://160fc5bb9223833197e8de0c338359fe0abc5d0cb0639998ae993c1c6d17f405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://160fc5bb9223833197e8de0c338359fe0abc5d0cb0639998ae993c1c6d17f405\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.617714 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a407b15fe2a005907d20150f004bd1e3e01b52188f055d8c72d305860cd84687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b78cae379953c1c7b6de4cc5d8eed4812dcd1ea6be9318f95a393af08f1319d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.626594 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d0714d9a-e9d5-4aca-8341-a073849f9234\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2d3fdc1f23f9ab38b6b03dc3bbabcbb383fc3595832143cabe7cac26a896176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wt5g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xlv8w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.637929 4765 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51cca3f8-b6e3-4c05-a289-32192e52215a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f7e237689338f5eccf67b7fd82584df7acadba7827db78e98cfbee486735714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f402ef73bf258661774bd475d8911e01ad9a138fd38542049ebafce043bdae3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e762c0558d46503dd0942c1621f035140a7b38824866b96add8539380aac1ee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34a2876f7fb3f12711ac296a6860290cc73a767c7e58b6c9224b40a7b0a3e9e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a85941be42d871653df3f6fa30161887c80d829b3284f99eb17b375b6ec589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c58f9a6e9c7b648b45152f9e24e2d69f658ca4a5958773a57aa3452645eee551\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00688228cfbc17dbeb238d918d5ca1df0462e20d4b14ad187073073ac936884e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8w29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lcr6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.648888 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"901d0a63-c83f-4175-9e6f-70695c2ee2ff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1210 06:48:12.960147 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1210 06:48:12.969156 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2845087399/tls.crt::/tmp/serving-cert-2845087399/tls.key\\\\\\\"\\\\nI1210 06:48:18.356505 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1210 06:48:18.379043 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1210 06:48:18.379077 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1210 06:48:18.379119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1210 06:48:18.379126 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 06:48:18.385056 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1210 06:48:18.385108 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385116 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 06:48:18.385122 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 06:48:18.385127 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 06:48:18.385132 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 06:48:18.385136 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 06:48:18.385339 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1210 06:48:18.388490 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.659660 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a220b577-ceff-4d65-ae22-8b9141158ad3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58f060c79f0e4210821a5b5485b559551df6b2652557a3d09c13c02aad0f62e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://643ed52b9915c3dd9ee250f2a7ab4b1b6edf81ad2bf22195f11f2100c7f13003\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f12bd7ae1e74d43db676788d30203875f6682194c96ff3db0aed89adc94ec963\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://926164f9337a99fd803f48660fa3af44eeda3e6aae79d367aae85b9c3bb5fb09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.672120 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.675184 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.675213 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.675224 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.675240 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.675253 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:20Z","lastTransitionTime":"2025-12-10T06:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.684316 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.693574 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-t8knp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4344cfbb-18ba-4190-82aa-1a2aa6ccbdcc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1402243d8b288ef89d24316abf4be57a5a6bb3a688f1d78c398801857dfc7d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfzct\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:18Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-t8knp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.709232 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:49:14Z\\\",\\\"message\\\":\\\"I1210 06:49:14.559918 6848 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-5wj7r after 0 failed attempt(s)\\\\nI1210 06:49:14.559923 6848 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-dcsjq\\\\nI1210 06:49:14.559830 6848 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1210 06:49:14.559818 6848 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1210 06:49:14.559928 6848 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-r78vd\\\\nI1210 06:49:14.559935 6848 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nF1210 06:49:14.559929 6848 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:49:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lsdwg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5wj7r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.717711 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k9sld" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"efb71311-50ec-4765-8caf-6f2e02b8dce9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxxpc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:34Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k9sld\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.728116 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:19Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1916e05129dd7d2b78aa02128c77351f9d89a4b804dd9999bfa2bb93c13c25f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.737488 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b68eeb35f1e45af30484ed6c39296bc88efc6cec446d77b119f1948e5c8e05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.749244 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-r78vd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5227381-9852-49ce-96f1-220c42aab12a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T06:49:07Z\\\",\\\"message\\\":\\\"2025-12-10T06:48:22+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117\\\\n2025-12-10T06:48:22+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_19fd4d0a-8399-4f0b-9137-1eea30e90117 to /host/opt/cni/bin/\\\\n2025-12-10T06:48:22Z [verbose] multus-daemon started\\\\n2025-12-10T06:48:22Z [verbose] Readiness Indicator file check\\\\n2025-12-10T06:49:07Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T06:48:21Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:49:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nc9wc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:21Z\\\"}}\" for pod \"openshift-multus\"/\"multus-r78vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.758855 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af4ada46-5cbb-4675-9e5a-4abf08bbea89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2eb316210ad53b324b68590c3e4227683bbe57ebce9ea6731a3b7a0a1a6a37e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9fc51cd2b956f88dd328f4808ae630f338b157c5a3ceb5e12117ad38f30bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr2vq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-whkng\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 
06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.774273 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a74b520e-cbc2-44b5-961b-a75d517c9429\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39df4c8e1f39cd444fad2292587b4002cbb0a3aa23737bdb77b7ed1e1f266f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ce327e0d6ccfaf93bd16dde791259b9fddfa7590402b2283acc9d43a87155b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8ef5362135ee6e22bb6b1859d7b53eea589fa917fdd2721803335bcd928e4fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:00Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.777586 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.777632 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.777642 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.777659 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.777670 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:20Z","lastTransitionTime":"2025-12-10T06:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.787973 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.797916 4765 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-dcsjq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c565f723-8bce-482f-a2c7-19581496ee74\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T06:48:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a33fe6f6e750591dc864b0c2d34e929fdbfe335993518d71c009b4f049ccad40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T06:48:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rvhm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T06:48:20Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-dcsjq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:20Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.879947 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.879987 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.879995 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.880013 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.880022 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:20Z","lastTransitionTime":"2025-12-10T06:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.982862 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.982914 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.982931 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.982949 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:20 crc kubenswrapper[4765]: I1210 06:49:20.982962 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:20Z","lastTransitionTime":"2025-12-10T06:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.085201 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.085274 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.085287 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.085303 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.085313 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:21Z","lastTransitionTime":"2025-12-10T06:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.187687 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.187735 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.187751 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.187769 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.187780 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:21Z","lastTransitionTime":"2025-12-10T06:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.290110 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.290152 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.290163 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.290190 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.290204 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:21Z","lastTransitionTime":"2025-12-10T06:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.392634 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.392675 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.392686 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.392702 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.392715 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:21Z","lastTransitionTime":"2025-12-10T06:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.495386 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.495413 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.495421 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.495435 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.495446 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:21Z","lastTransitionTime":"2025-12-10T06:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.588790 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.588883 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:21 crc kubenswrapper[4765]: E1210 06:49:21.589014 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:21 crc kubenswrapper[4765]: E1210 06:49:21.589073 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.597692 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.597725 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.597736 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.597751 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.597763 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:21Z","lastTransitionTime":"2025-12-10T06:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.699923 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.699959 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.699967 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.699981 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.699990 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:21Z","lastTransitionTime":"2025-12-10T06:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.802302 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.802345 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.802355 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.802370 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.802381 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:21Z","lastTransitionTime":"2025-12-10T06:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.905119 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.905156 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.905165 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.905181 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:21 crc kubenswrapper[4765]: I1210 06:49:21.905191 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:21Z","lastTransitionTime":"2025-12-10T06:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.007194 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.007233 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.007242 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.007258 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.007269 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:22Z","lastTransitionTime":"2025-12-10T06:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.109512 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.109617 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.109627 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.109640 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.109651 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:22Z","lastTransitionTime":"2025-12-10T06:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.212170 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.212220 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.212231 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.212256 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.212269 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:22Z","lastTransitionTime":"2025-12-10T06:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.314713 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.314742 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.314749 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.314785 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.314820 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:22Z","lastTransitionTime":"2025-12-10T06:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
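[Editor's note] The three "Failed to update status for pod" entries above share one root cause: the serving certificate of the pod.network-node-identity.openshift.io webhook expired on 2025-08-24T17:21:41Z, so every HTTPS POST to https://127.0.0.1:9743/pod fails TLS verification. A minimal Go sketch of the NotBefore/NotAfter validity-window check behind this class of x509 error; the helper name checkValidity and the throwaway self-signed certificate are illustrative assumptions, not kubelet or webhook code:

```go
package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/x509"
	"crypto/x509/pkix"
	"fmt"
	"math/big"
	"time"
)

// checkValidity mirrors the validity-window comparison TLS verification
// performs; the message format echoes the expired-cert case in the log.
func checkValidity(cert *x509.Certificate, now time.Time) error {
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		return fmt.Errorf("certificate has expired or is not yet valid: current time %s is after %s",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	}
	return nil
}

func main() {
	// Throwaway self-signed cert whose NotAfter is already in the past,
	// standing in for the webhook serving cert (errors ignored for brevity).
	key, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{CommonName: "network-node-identity.openshift.io"},
		NotBefore:    time.Now().Add(-2 * time.Hour),
		NotAfter:     time.Now().Add(-1 * time.Hour), // expired an hour ago
	}
	der, _ := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	cert, _ := x509.ParseCertificate(der)
	fmt.Println(checkValidity(cert, time.Now()))
}
```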
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.370201 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.370374 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.370343969 +0000 UTC m=+146.097009285 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.370426 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.370457 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.370478 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.370578 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.370593 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.370602 4765 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.370615 4765 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.370651 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.370639117 +0000 UTC m=+146.097304433 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.370670 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.370659448 +0000 UTC m=+146.097324844 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.370664 4765 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.370777 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.370755841 +0000 UTC m=+146.097421237 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.416650 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.416689 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.416700 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.416716 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.416726 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:22Z","lastTransitionTime":"2025-12-10T06:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.471010 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.471195 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.471227 4765 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.471238 4765 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.471296 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.471279709 +0000 UTC m=+146.197945025 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.518916 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.518960 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.518974 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.518988 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.518998 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:22Z","lastTransitionTime":"2025-12-10T06:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.588704 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.588704 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.588860 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 06:49:22 crc kubenswrapper[4765]: E1210 06:49:22.588925 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
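[Editor's note] The "No retries permitted until ... (durationBeforeRetry 1m4s)" entries above reflect exponential backoff on repeatedly failing volume operations. A minimal sketch of that pattern, assuming a 500ms base delay doubling per failure with a cap; these constants are illustrative assumptions, not values read from kubelet source. Eight consecutive failures land on the 64s delay seen in the log:

```go
package main

import (
	"fmt"
	"time"
)

const (
	initialBackoff = 500 * time.Millisecond // assumed base delay
	maxBackoff     = 2 * time.Minute        // assumed cap
)

// backoffFor doubles the delay once per additional failure, up to the cap.
func backoffFor(failures int) time.Duration {
	d := initialBackoff
	for i := 1; i < failures; i++ {
		d *= 2
		if d > maxBackoff {
			return maxBackoff
		}
	}
	return d
}

func main() {
	for n := 1; n <= 8; n++ {
		fmt.Printf("failure %d -> retry in %s\n", n, backoffFor(n))
	}
	// failure 8 -> retry in 1m4s, matching durationBeforeRetry above
}
```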
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.598903 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.621106 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.621139 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.621151 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.621186 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.621197 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:22Z","lastTransitionTime":"2025-12-10T06:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.724409 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.724461 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.724480 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.724502 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.724515 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:22Z","lastTransitionTime":"2025-12-10T06:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.827412 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.827456 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.827468 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.827486 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.827499 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:22Z","lastTransitionTime":"2025-12-10T06:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.930099 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.930134 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.930145 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.930161 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:22 crc kubenswrapper[4765]: I1210 06:49:22.930171 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:22Z","lastTransitionTime":"2025-12-10T06:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.032290 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.032326 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.032337 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.032355 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.032366 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.134941 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.134974 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.135017 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.135034 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.135042 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.237442 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.237505 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.237516 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.237555 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.237567 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.340120 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.340173 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.340195 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.340216 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.340230 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.442603 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.442650 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.442661 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.442678 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.442690 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.545168 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.545206 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.545215 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.545231 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.545240 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.588471 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.588544 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:23 crc kubenswrapper[4765]: E1210 06:49:23.588623 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:23 crc kubenswrapper[4765]: E1210 06:49:23.588668 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.647990 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.648023 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.648064 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.648115 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.648129 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.750467 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.750514 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.750531 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.750550 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.750564 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.856325 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.856740 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.856753 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.856776 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.856791 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.892669 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.892728 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.892742 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.892764 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.892779 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
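[Editor's note] Every "Node became not ready" entry above serializes the same Ready=False condition, with only the timestamps advancing. A small sketch reconstructing that JSON; the NodeCondition struct here is illustrative and stands in for the k8s.io/api type, with field names and values taken from the log itself:

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// NodeCondition mirrors the condition={...} JSON logged by setters.go.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	now := time.Now().UTC().Format(time.RFC3339)
	c := NodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	b, _ := json.Marshal(c)
	fmt.Println(string(b)) // same shape as the condition={...} payload above
}
```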
Dec 10 06:49:23 crc kubenswrapper[4765]: E1210 06:49:23.910200 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.915422 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.915470 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.915482 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.915501 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.915515 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: E1210 06:49:23.930195 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.935239 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.935293 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.935309 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.935332 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.935347 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: E1210 06:49:23.951364 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.958183 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.958236 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.958245 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.958263 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.958274 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: E1210 06:49:23.972496 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.976673 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.976730 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.976740 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.976762 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.976774 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:23 crc kubenswrapper[4765]: E1210 06:49:23.988682 4765 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T06:49:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0e72ffd7-59d0-4884-9bfb-94943bbc5155\\\",\\\"systemUUID\\\":\\\"f84f1526-209e-4d0d-8c67-84d36e1af992\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T06:49:23Z is after 2025-08-24T17:21:41Z" Dec 10 06:49:23 crc kubenswrapper[4765]: E1210 06:49:23.988927 4765 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.990741 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.990784 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.990795 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.990814 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:23 crc kubenswrapper[4765]: I1210 06:49:23.990824 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:23Z","lastTransitionTime":"2025-12-10T06:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.093427 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.093477 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.093490 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.093506 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.093516 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:24Z","lastTransitionTime":"2025-12-10T06:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.195417 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.195455 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.195465 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.195480 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.195492 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:24Z","lastTransitionTime":"2025-12-10T06:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.297326 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.297358 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.297367 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.297383 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.297403 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:24Z","lastTransitionTime":"2025-12-10T06:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.400307 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.400347 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.400360 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.400376 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.400390 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:24Z","lastTransitionTime":"2025-12-10T06:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.502963 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.502995 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.503030 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.503045 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.503055 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:24Z","lastTransitionTime":"2025-12-10T06:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.588776 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.588776 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:24 crc kubenswrapper[4765]: E1210 06:49:24.589073 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:24 crc kubenswrapper[4765]: E1210 06:49:24.589287 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.609218 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.609274 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.609285 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.609301 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.609311 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:24Z","lastTransitionTime":"2025-12-10T06:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.712029 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.712073 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.712100 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.712116 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.712148 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:24Z","lastTransitionTime":"2025-12-10T06:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.814549 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.814594 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.814603 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.814621 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.814632 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:24Z","lastTransitionTime":"2025-12-10T06:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.916522 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.916566 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.916577 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.916596 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:24 crc kubenswrapper[4765]: I1210 06:49:24.916607 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:24Z","lastTransitionTime":"2025-12-10T06:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.018556 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.018641 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.018653 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.018670 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.018682 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:25Z","lastTransitionTime":"2025-12-10T06:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.120984 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.121020 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.121029 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.121042 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.121051 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:25Z","lastTransitionTime":"2025-12-10T06:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.223416 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.223457 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.223466 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.223480 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.223490 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:25Z","lastTransitionTime":"2025-12-10T06:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.325786 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.325835 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.325849 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.325867 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.325878 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:25Z","lastTransitionTime":"2025-12-10T06:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.427896 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.427940 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.427949 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.427964 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.427974 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:25Z","lastTransitionTime":"2025-12-10T06:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.530652 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.530687 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.530696 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.530709 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.530719 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:25Z","lastTransitionTime":"2025-12-10T06:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.588407 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.588429 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:25 crc kubenswrapper[4765]: E1210 06:49:25.588649 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:25 crc kubenswrapper[4765]: E1210 06:49:25.588565 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.633510 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.633549 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.633563 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.633582 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.633592 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:25Z","lastTransitionTime":"2025-12-10T06:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.735999 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.736054 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.736067 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.736123 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.736138 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:25Z","lastTransitionTime":"2025-12-10T06:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.838377 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.838418 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.838434 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.838453 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.838463 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:25Z","lastTransitionTime":"2025-12-10T06:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.941374 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.941415 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.941432 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.941451 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:25 crc kubenswrapper[4765]: I1210 06:49:25.941461 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:25Z","lastTransitionTime":"2025-12-10T06:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.044170 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.044224 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.044234 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.044248 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.044257 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:26Z","lastTransitionTime":"2025-12-10T06:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.146449 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.146501 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.146513 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.146531 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.146544 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:26Z","lastTransitionTime":"2025-12-10T06:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.249670 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.249708 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.249717 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.249732 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.249741 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:26Z","lastTransitionTime":"2025-12-10T06:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.352335 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.352383 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.352395 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.352413 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.352424 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:26Z","lastTransitionTime":"2025-12-10T06:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.454688 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.454735 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.454745 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.454760 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.454769 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:26Z","lastTransitionTime":"2025-12-10T06:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.556860 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.556921 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.556932 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.556948 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.556960 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:26Z","lastTransitionTime":"2025-12-10T06:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.588266 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:26 crc kubenswrapper[4765]: E1210 06:49:26.588441 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.588663 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:26 crc kubenswrapper[4765]: E1210 06:49:26.588758 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.589133 4765 scope.go:117] "RemoveContainer" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" Dec 10 06:49:26 crc kubenswrapper[4765]: E1210 06:49:26.589399 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.659909 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.659944 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.659952 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.659965 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.659974 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:26Z","lastTransitionTime":"2025-12-10T06:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.777204 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.777229 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.777237 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.777251 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.777259 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:26Z","lastTransitionTime":"2025-12-10T06:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.879682 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.879724 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.879736 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.879750 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.879762 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:26Z","lastTransitionTime":"2025-12-10T06:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.981402 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.981445 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.981457 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.981472 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:26 crc kubenswrapper[4765]: I1210 06:49:26.981483 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:26Z","lastTransitionTime":"2025-12-10T06:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.088381 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.088432 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.088447 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.088466 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.088479 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:27Z","lastTransitionTime":"2025-12-10T06:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.190560 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.190611 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.190620 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.190636 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.190646 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:27Z","lastTransitionTime":"2025-12-10T06:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.292990 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.293036 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.293052 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.293070 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.293097 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:27Z","lastTransitionTime":"2025-12-10T06:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.395557 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.395667 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.395685 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.395702 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.395713 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:27Z","lastTransitionTime":"2025-12-10T06:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.497623 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.497671 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.497681 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.497695 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.497706 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:27Z","lastTransitionTime":"2025-12-10T06:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.588877 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.588956 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:27 crc kubenswrapper[4765]: E1210 06:49:27.589027 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:27 crc kubenswrapper[4765]: E1210 06:49:27.589131 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.599775 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.599826 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.599837 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.599853 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.599867 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:27Z","lastTransitionTime":"2025-12-10T06:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.701715 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.701752 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.701760 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.701774 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.701783 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:27Z","lastTransitionTime":"2025-12-10T06:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.804013 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.804056 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.804071 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.804106 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.804126 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:27Z","lastTransitionTime":"2025-12-10T06:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.906466 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.906509 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.906517 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.906530 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:27 crc kubenswrapper[4765]: I1210 06:49:27.906541 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:27Z","lastTransitionTime":"2025-12-10T06:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.009019 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.009237 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.009253 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.009270 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.009282 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:28Z","lastTransitionTime":"2025-12-10T06:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.111365 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.111406 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.111417 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.111433 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.111444 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:28Z","lastTransitionTime":"2025-12-10T06:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.213747 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.213780 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.213791 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.213809 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.213819 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:28Z","lastTransitionTime":"2025-12-10T06:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.316176 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.316205 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.316213 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.316227 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.316237 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:28Z","lastTransitionTime":"2025-12-10T06:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.418386 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.418417 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.418424 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.418438 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.418447 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:28Z","lastTransitionTime":"2025-12-10T06:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.519953 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.519986 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.519996 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.520012 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.520023 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:28Z","lastTransitionTime":"2025-12-10T06:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.587892 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.587928 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:28 crc kubenswrapper[4765]: E1210 06:49:28.588020 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:28 crc kubenswrapper[4765]: E1210 06:49:28.588122 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.621987 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.622041 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.622053 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.622069 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.622078 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:28Z","lastTransitionTime":"2025-12-10T06:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.724847 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.724916 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.724934 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.724954 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.724965 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:28Z","lastTransitionTime":"2025-12-10T06:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.827048 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.827102 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.827115 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.827130 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.827143 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:28Z","lastTransitionTime":"2025-12-10T06:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.930204 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.930270 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.930282 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.930301 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:28 crc kubenswrapper[4765]: I1210 06:49:28.930314 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:28Z","lastTransitionTime":"2025-12-10T06:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.034001 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.034071 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.034119 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.034149 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.034167 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:29Z","lastTransitionTime":"2025-12-10T06:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.137633 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.137682 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.137692 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.137710 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.137723 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:29Z","lastTransitionTime":"2025-12-10T06:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.240210 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.240250 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.240262 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.240278 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.240290 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:29Z","lastTransitionTime":"2025-12-10T06:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.342715 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.342803 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.342816 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.342833 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.342846 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:29Z","lastTransitionTime":"2025-12-10T06:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.445422 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.445456 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.445467 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.445485 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.445500 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:29Z","lastTransitionTime":"2025-12-10T06:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.548016 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.548065 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.548078 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.548116 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.548128 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:29Z","lastTransitionTime":"2025-12-10T06:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.588471 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.588504 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:29 crc kubenswrapper[4765]: E1210 06:49:29.588697 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:29 crc kubenswrapper[4765]: E1210 06:49:29.588867 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.650367 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.650403 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.650412 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.650425 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.650435 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:29Z","lastTransitionTime":"2025-12-10T06:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.752200 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.752247 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.752259 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.752276 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.752287 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:29Z","lastTransitionTime":"2025-12-10T06:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.858473 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.858516 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.858525 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.858541 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.858553 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:29Z","lastTransitionTime":"2025-12-10T06:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.960708 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.962315 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.962359 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.962382 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:29 crc kubenswrapper[4765]: I1210 06:49:29.962394 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:29Z","lastTransitionTime":"2025-12-10T06:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.065264 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.065312 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.065323 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.065341 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.065364 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:30Z","lastTransitionTime":"2025-12-10T06:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.167408 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.167462 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.167474 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.167489 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.167498 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:30Z","lastTransitionTime":"2025-12-10T06:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.270345 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.270396 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.270407 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.270423 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.270437 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:30Z","lastTransitionTime":"2025-12-10T06:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.372590 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.372625 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.372635 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.372648 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.372657 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:30Z","lastTransitionTime":"2025-12-10T06:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.475191 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.475230 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.475241 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.475255 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.475266 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:30Z","lastTransitionTime":"2025-12-10T06:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.577561 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.577602 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.577617 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.577634 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.577644 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:30Z","lastTransitionTime":"2025-12-10T06:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.588197 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.588220 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:30 crc kubenswrapper[4765]: E1210 06:49:30.588388 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:30 crc kubenswrapper[4765]: E1210 06:49:30.588433 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.638350 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-r78vd" podStartSLOduration=70.638331229 podStartE2EDuration="1m10.638331229s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.637418823 +0000 UTC m=+90.364084149" watchObservedRunningTime="2025-12-10 06:49:30.638331229 +0000 UTC m=+90.364996545" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.651481 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-whkng" podStartSLOduration=69.651465125 podStartE2EDuration="1m9.651465125s" podCreationTimestamp="2025-12-10 06:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.650911449 +0000 UTC m=+90.377576765" watchObservedRunningTime="2025-12-10 06:49:30.651465125 +0000 UTC m=+90.378130441" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.663799 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=8.663786188 podStartE2EDuration="8.663786188s" podCreationTimestamp="2025-12-10 06:49:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.663318734 +0000 UTC m=+90.389984050" watchObservedRunningTime="2025-12-10 06:49:30.663786188 +0000 UTC m=+90.390451504" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.682731 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.682769 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.682781 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.682800 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.682813 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:30Z","lastTransitionTime":"2025-12-10T06:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.695673 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=72.69565343 podStartE2EDuration="1m12.69565343s" podCreationTimestamp="2025-12-10 06:48:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.677351846 +0000 UTC m=+90.404017152" watchObservedRunningTime="2025-12-10 06:49:30.69565343 +0000 UTC m=+90.422318746" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.728325 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-dcsjq" podStartSLOduration=70.728304015 podStartE2EDuration="1m10.728304015s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.706242903 +0000 UTC m=+90.432908219" watchObservedRunningTime="2025-12-10 06:49:30.728304015 +0000 UTC m=+90.454969331" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.729049 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=13.729043926 podStartE2EDuration="13.729043926s" podCreationTimestamp="2025-12-10 06:49:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.728301665 +0000 UTC m=+90.454966981" watchObservedRunningTime="2025-12-10 06:49:30.729043926 +0000 UTC m=+90.455709242" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.755246 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podStartSLOduration=70.755228966 podStartE2EDuration="1m10.755228966s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.754294249 +0000 UTC m=+90.480959565" watchObservedRunningTime="2025-12-10 06:49:30.755228966 +0000 UTC m=+90.481894282" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.785003 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.785042 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.785053 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.785071 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.785098 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:30Z","lastTransitionTime":"2025-12-10T06:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.800789 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-lcr6j" podStartSLOduration=70.80077367 podStartE2EDuration="1m10.80077367s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.775204348 +0000 UTC m=+90.501869664" watchObservedRunningTime="2025-12-10 06:49:30.80077367 +0000 UTC m=+90.527438986" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.835234 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=72.835202646 podStartE2EDuration="1m12.835202646s" podCreationTimestamp="2025-12-10 06:48:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.830962924 +0000 UTC m=+90.557628240" watchObservedRunningTime="2025-12-10 06:49:30.835202646 +0000 UTC m=+90.561867962" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.847688 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=37.847666403 podStartE2EDuration="37.847666403s" podCreationTimestamp="2025-12-10 06:48:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.8475651 +0000 UTC m=+90.574230426" watchObservedRunningTime="2025-12-10 06:49:30.847666403 +0000 UTC m=+90.574331709" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.887999 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.888040 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.888048 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.888065 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.888076 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:30Z","lastTransitionTime":"2025-12-10T06:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.989590 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.989624 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.989632 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.989645 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:30 crc kubenswrapper[4765]: I1210 06:49:30.989653 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:30Z","lastTransitionTime":"2025-12-10T06:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.092042 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.092116 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.092130 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.092146 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.092155 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:31Z","lastTransitionTime":"2025-12-10T06:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.194626 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.194673 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.194686 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.194703 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.194716 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:31Z","lastTransitionTime":"2025-12-10T06:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.297000 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.297353 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.297421 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.297504 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.297578 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:31Z","lastTransitionTime":"2025-12-10T06:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.400028 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.400285 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.400299 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.400313 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.400322 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:31Z","lastTransitionTime":"2025-12-10T06:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.502936 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.502980 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.502998 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.503071 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.503106 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:31Z","lastTransitionTime":"2025-12-10T06:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.588596 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:31 crc kubenswrapper[4765]: E1210 06:49:31.588755 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.588621 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:31 crc kubenswrapper[4765]: E1210 06:49:31.588843 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.605788 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.605832 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.605850 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.605871 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.605884 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:31Z","lastTransitionTime":"2025-12-10T06:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.707994 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.708029 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.708037 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.708051 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.708060 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:31Z","lastTransitionTime":"2025-12-10T06:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.810459 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.810493 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.810502 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.810530 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.810539 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:31Z","lastTransitionTime":"2025-12-10T06:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.912418 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.912449 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.912457 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.912469 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:31 crc kubenswrapper[4765]: I1210 06:49:31.912480 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:31Z","lastTransitionTime":"2025-12-10T06:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.014864 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.014906 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.014916 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.014930 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.014942 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:32Z","lastTransitionTime":"2025-12-10T06:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.118219 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.118273 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.118284 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.118300 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.118311 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:32Z","lastTransitionTime":"2025-12-10T06:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.222546 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.222611 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.222621 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.222635 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.222644 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:32Z","lastTransitionTime":"2025-12-10T06:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.324738 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.324780 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.324792 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.324810 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.324823 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:32Z","lastTransitionTime":"2025-12-10T06:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.426543 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.426596 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.426607 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.426623 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.426634 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:32Z","lastTransitionTime":"2025-12-10T06:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.529301 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.529340 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.529350 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.529370 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.529379 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:32Z","lastTransitionTime":"2025-12-10T06:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.588947 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.588947 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:32 crc kubenswrapper[4765]: E1210 06:49:32.589111 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:32 crc kubenswrapper[4765]: E1210 06:49:32.589232 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.634644 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.634706 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.634719 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.634738 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.634803 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:32Z","lastTransitionTime":"2025-12-10T06:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.737448 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.737513 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.737531 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.737556 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.737573 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:32Z","lastTransitionTime":"2025-12-10T06:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.840567 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.840656 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.840673 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.840923 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.840941 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:32Z","lastTransitionTime":"2025-12-10T06:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.943026 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.943102 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.943114 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.943133 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:32 crc kubenswrapper[4765]: I1210 06:49:32.943144 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:32Z","lastTransitionTime":"2025-12-10T06:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.046020 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.046062 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.046074 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.046223 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.046255 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:33Z","lastTransitionTime":"2025-12-10T06:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.149630 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.149667 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.149678 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.149694 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.149706 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:33Z","lastTransitionTime":"2025-12-10T06:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.252312 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.252362 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.252374 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.252392 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.252403 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:33Z","lastTransitionTime":"2025-12-10T06:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.354701 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.354746 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.354754 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.354770 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.354782 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:33Z","lastTransitionTime":"2025-12-10T06:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.457035 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.457079 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.457104 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.457124 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.457137 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:33Z","lastTransitionTime":"2025-12-10T06:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.558986 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.559017 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.559028 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.559044 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.559054 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:33Z","lastTransitionTime":"2025-12-10T06:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.587889 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:33 crc kubenswrapper[4765]: E1210 06:49:33.588037 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.587889 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:33 crc kubenswrapper[4765]: E1210 06:49:33.588220 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.662202 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.662265 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.662278 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.662300 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.662313 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:33Z","lastTransitionTime":"2025-12-10T06:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.764879 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.764925 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.764936 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.764956 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.764967 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:33Z","lastTransitionTime":"2025-12-10T06:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.868434 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.868492 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.868507 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.868528 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.868543 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:33Z","lastTransitionTime":"2025-12-10T06:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.970983 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.971025 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.971061 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.971077 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:33 crc kubenswrapper[4765]: I1210 06:49:33.971103 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:33Z","lastTransitionTime":"2025-12-10T06:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.073647 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.073687 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.073698 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.073717 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.073773 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:34Z","lastTransitionTime":"2025-12-10T06:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.175449 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.175479 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.175487 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.175502 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.175512 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:34Z","lastTransitionTime":"2025-12-10T06:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.195027 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.195067 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.195100 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.195120 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.195132 4765 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T06:49:34Z","lastTransitionTime":"2025-12-10T06:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.228895 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-t8knp" podStartSLOduration=76.228879495 podStartE2EDuration="1m16.228879495s" podCreationTimestamp="2025-12-10 06:48:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:30.887818942 +0000 UTC m=+90.614484258" watchObservedRunningTime="2025-12-10 06:49:34.228879495 +0000 UTC m=+93.955544811" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.229463 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm"] Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.229850 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.231573 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.231587 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.232025 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.235157 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.289876 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74d863c9-6640-4c1f-beeb-16a5c49dd105-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.289951 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74d863c9-6640-4c1f-beeb-16a5c49dd105-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.290009 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/74d863c9-6640-4c1f-beeb-16a5c49dd105-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.290033 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/74d863c9-6640-4c1f-beeb-16a5c49dd105-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.290048 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74d863c9-6640-4c1f-beeb-16a5c49dd105-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.390915 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74d863c9-6640-4c1f-beeb-16a5c49dd105-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.390979 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/74d863c9-6640-4c1f-beeb-16a5c49dd105-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.391008 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/74d863c9-6640-4c1f-beeb-16a5c49dd105-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.391026 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74d863c9-6640-4c1f-beeb-16a5c49dd105-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.391044 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74d863c9-6640-4c1f-beeb-16a5c49dd105-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.391131 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/74d863c9-6640-4c1f-beeb-16a5c49dd105-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.391115 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/74d863c9-6640-4c1f-beeb-16a5c49dd105-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc 
kubenswrapper[4765]: I1210 06:49:34.392351 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74d863c9-6640-4c1f-beeb-16a5c49dd105-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.398092 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74d863c9-6640-4c1f-beeb-16a5c49dd105-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.407636 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74d863c9-6640-4c1f-beeb-16a5c49dd105-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tn9wm\" (UID: \"74d863c9-6640-4c1f-beeb-16a5c49dd105\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.543594 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.588959 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:34 crc kubenswrapper[4765]: E1210 06:49:34.589112 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:34 crc kubenswrapper[4765]: I1210 06:49:34.590008 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:34 crc kubenswrapper[4765]: E1210 06:49:34.590357 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:35 crc kubenswrapper[4765]: I1210 06:49:35.001323 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" event={"ID":"74d863c9-6640-4c1f-beeb-16a5c49dd105","Type":"ContainerStarted","Data":"447e73f1eca6e950b9f16108a48c0530f1a87f2a0490a12e1adbc37dbe5ccdef"} Dec 10 06:49:35 crc kubenswrapper[4765]: I1210 06:49:35.001652 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" event={"ID":"74d863c9-6640-4c1f-beeb-16a5c49dd105","Type":"ContainerStarted","Data":"8b1ea0541c88a6a0066efa5d5024fb09254bcbd16fe15cd78aa75ee5954cef93"} Dec 10 06:49:35 crc kubenswrapper[4765]: I1210 06:49:35.013835 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tn9wm" podStartSLOduration=75.013818249 podStartE2EDuration="1m15.013818249s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:49:35.013592193 +0000 UTC m=+94.740257509" watchObservedRunningTime="2025-12-10 06:49:35.013818249 +0000 UTC m=+94.740483565" Dec 10 06:49:35 crc kubenswrapper[4765]: I1210 06:49:35.588691 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:35 crc kubenswrapper[4765]: I1210 06:49:35.588707 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:35 crc kubenswrapper[4765]: E1210 06:49:35.588820 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:35 crc kubenswrapper[4765]: E1210 06:49:35.588924 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:36 crc kubenswrapper[4765]: I1210 06:49:36.588509 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:36 crc kubenswrapper[4765]: I1210 06:49:36.588530 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:36 crc kubenswrapper[4765]: E1210 06:49:36.588641 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:36 crc kubenswrapper[4765]: E1210 06:49:36.588714 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:37 crc kubenswrapper[4765]: I1210 06:49:37.588057 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:37 crc kubenswrapper[4765]: I1210 06:49:37.588057 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:37 crc kubenswrapper[4765]: E1210 06:49:37.588259 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:37 crc kubenswrapper[4765]: E1210 06:49:37.588429 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:37 crc kubenswrapper[4765]: I1210 06:49:37.924954 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:37 crc kubenswrapper[4765]: E1210 06:49:37.925127 4765 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:49:37 crc kubenswrapper[4765]: E1210 06:49:37.925180 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs podName:efb71311-50ec-4765-8caf-6f2e02b8dce9 nodeName:}" failed. No retries permitted until 2025-12-10 06:50:41.925165847 +0000 UTC m=+161.651831163 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs") pod "network-metrics-daemon-k9sld" (UID: "efb71311-50ec-4765-8caf-6f2e02b8dce9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 06:49:38 crc kubenswrapper[4765]: I1210 06:49:38.588778 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:38 crc kubenswrapper[4765]: E1210 06:49:38.588888 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:38 crc kubenswrapper[4765]: I1210 06:49:38.588900 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:38 crc kubenswrapper[4765]: E1210 06:49:38.589031 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:38 crc kubenswrapper[4765]: I1210 06:49:38.589751 4765 scope.go:117] "RemoveContainer" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" Dec 10 06:49:38 crc kubenswrapper[4765]: E1210 06:49:38.589933 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" Dec 10 06:49:39 crc kubenswrapper[4765]: I1210 06:49:39.588002 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:39 crc kubenswrapper[4765]: I1210 06:49:39.588117 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:39 crc kubenswrapper[4765]: E1210 06:49:39.588275 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:39 crc kubenswrapper[4765]: E1210 06:49:39.588483 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:40 crc kubenswrapper[4765]: I1210 06:49:40.588420 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:40 crc kubenswrapper[4765]: I1210 06:49:40.588551 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:40 crc kubenswrapper[4765]: E1210 06:49:40.590024 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:40 crc kubenswrapper[4765]: E1210 06:49:40.590186 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:41 crc kubenswrapper[4765]: I1210 06:49:41.588851 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:41 crc kubenswrapper[4765]: I1210 06:49:41.588873 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:41 crc kubenswrapper[4765]: E1210 06:49:41.589108 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:41 crc kubenswrapper[4765]: E1210 06:49:41.588990 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:42 crc kubenswrapper[4765]: I1210 06:49:42.588036 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:42 crc kubenswrapper[4765]: E1210 06:49:42.588181 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:42 crc kubenswrapper[4765]: I1210 06:49:42.588213 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:42 crc kubenswrapper[4765]: E1210 06:49:42.588338 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:43 crc kubenswrapper[4765]: I1210 06:49:43.588580 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:43 crc kubenswrapper[4765]: I1210 06:49:43.588616 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:43 crc kubenswrapper[4765]: E1210 06:49:43.588695 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:43 crc kubenswrapper[4765]: E1210 06:49:43.588745 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:44 crc kubenswrapper[4765]: I1210 06:49:44.588911 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:44 crc kubenswrapper[4765]: I1210 06:49:44.588911 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:44 crc kubenswrapper[4765]: E1210 06:49:44.589069 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:44 crc kubenswrapper[4765]: E1210 06:49:44.589204 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:45 crc kubenswrapper[4765]: I1210 06:49:45.588802 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:45 crc kubenswrapper[4765]: E1210 06:49:45.588942 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:45 crc kubenswrapper[4765]: I1210 06:49:45.589203 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:45 crc kubenswrapper[4765]: E1210 06:49:45.589273 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:46 crc kubenswrapper[4765]: I1210 06:49:46.588986 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:46 crc kubenswrapper[4765]: I1210 06:49:46.588986 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:46 crc kubenswrapper[4765]: E1210 06:49:46.589257 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:46 crc kubenswrapper[4765]: E1210 06:49:46.589420 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:47 crc kubenswrapper[4765]: I1210 06:49:47.588628 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:47 crc kubenswrapper[4765]: I1210 06:49:47.588720 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:47 crc kubenswrapper[4765]: E1210 06:49:47.588755 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:47 crc kubenswrapper[4765]: E1210 06:49:47.588857 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:48 crc kubenswrapper[4765]: I1210 06:49:48.588400 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:48 crc kubenswrapper[4765]: I1210 06:49:48.588717 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:48 crc kubenswrapper[4765]: E1210 06:49:48.588857 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:48 crc kubenswrapper[4765]: E1210 06:49:48.589035 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:49 crc kubenswrapper[4765]: I1210 06:49:49.588412 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:49 crc kubenswrapper[4765]: I1210 06:49:49.588444 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:49 crc kubenswrapper[4765]: E1210 06:49:49.588533 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:49 crc kubenswrapper[4765]: E1210 06:49:49.588600 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:50 crc kubenswrapper[4765]: I1210 06:49:50.588739 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:50 crc kubenswrapper[4765]: I1210 06:49:50.588826 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:50 crc kubenswrapper[4765]: E1210 06:49:50.589943 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:50 crc kubenswrapper[4765]: E1210 06:49:50.590003 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:51 crc kubenswrapper[4765]: I1210 06:49:51.588904 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:51 crc kubenswrapper[4765]: I1210 06:49:51.589000 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:51 crc kubenswrapper[4765]: E1210 06:49:51.589070 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:51 crc kubenswrapper[4765]: E1210 06:49:51.589209 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:52 crc kubenswrapper[4765]: I1210 06:49:52.588495 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:52 crc kubenswrapper[4765]: I1210 06:49:52.588532 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:52 crc kubenswrapper[4765]: E1210 06:49:52.588638 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:52 crc kubenswrapper[4765]: E1210 06:49:52.588711 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:53 crc kubenswrapper[4765]: I1210 06:49:53.588955 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:53 crc kubenswrapper[4765]: I1210 06:49:53.588987 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:53 crc kubenswrapper[4765]: E1210 06:49:53.589108 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:53 crc kubenswrapper[4765]: E1210 06:49:53.589227 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:53 crc kubenswrapper[4765]: I1210 06:49:53.589943 4765 scope.go:117] "RemoveContainer" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" Dec 10 06:49:53 crc kubenswrapper[4765]: E1210 06:49:53.590203 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5wj7r_openshift-ovn-kubernetes(8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" Dec 10 06:49:54 crc kubenswrapper[4765]: I1210 06:49:54.062884 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r78vd_d5227381-9852-49ce-96f1-220c42aab12a/kube-multus/1.log" Dec 10 06:49:54 crc kubenswrapper[4765]: I1210 06:49:54.063567 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r78vd_d5227381-9852-49ce-96f1-220c42aab12a/kube-multus/0.log" Dec 10 06:49:54 crc kubenswrapper[4765]: I1210 06:49:54.063627 4765 generic.go:334] "Generic (PLEG): container finished" podID="d5227381-9852-49ce-96f1-220c42aab12a" containerID="1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae" exitCode=1 Dec 10 06:49:54 crc kubenswrapper[4765]: I1210 06:49:54.063659 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r78vd" event={"ID":"d5227381-9852-49ce-96f1-220c42aab12a","Type":"ContainerDied","Data":"1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae"} Dec 10 06:49:54 crc kubenswrapper[4765]: I1210 06:49:54.063697 4765 scope.go:117] "RemoveContainer" containerID="ca43aa46549be48f392081b91858eae15efafa12ab91fb1d313c88199bfb03ce" Dec 10 06:49:54 crc kubenswrapper[4765]: I1210 06:49:54.064169 4765 scope.go:117] "RemoveContainer" containerID="1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae" Dec 10 06:49:54 crc kubenswrapper[4765]: E1210 06:49:54.064314 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-r78vd_openshift-multus(d5227381-9852-49ce-96f1-220c42aab12a)\"" pod="openshift-multus/multus-r78vd" podUID="d5227381-9852-49ce-96f1-220c42aab12a" Dec 10 06:49:54 crc kubenswrapper[4765]: I1210 06:49:54.588132 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:54 crc kubenswrapper[4765]: E1210 06:49:54.588279 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:54 crc kubenswrapper[4765]: I1210 06:49:54.588689 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:54 crc kubenswrapper[4765]: E1210 06:49:54.588841 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:55 crc kubenswrapper[4765]: I1210 06:49:55.069243 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r78vd_d5227381-9852-49ce-96f1-220c42aab12a/kube-multus/1.log" Dec 10 06:49:55 crc kubenswrapper[4765]: I1210 06:49:55.588669 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:55 crc kubenswrapper[4765]: I1210 06:49:55.588745 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:55 crc kubenswrapper[4765]: E1210 06:49:55.588903 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:55 crc kubenswrapper[4765]: E1210 06:49:55.589029 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:56 crc kubenswrapper[4765]: I1210 06:49:56.588429 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:56 crc kubenswrapper[4765]: I1210 06:49:56.588477 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:56 crc kubenswrapper[4765]: E1210 06:49:56.588602 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:56 crc kubenswrapper[4765]: E1210 06:49:56.588738 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:57 crc kubenswrapper[4765]: I1210 06:49:57.588159 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:57 crc kubenswrapper[4765]: I1210 06:49:57.588275 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:57 crc kubenswrapper[4765]: E1210 06:49:57.588627 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:49:57 crc kubenswrapper[4765]: E1210 06:49:57.588891 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:58 crc kubenswrapper[4765]: I1210 06:49:58.588633 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:49:58 crc kubenswrapper[4765]: I1210 06:49:58.588732 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:49:58 crc kubenswrapper[4765]: E1210 06:49:58.588874 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:49:58 crc kubenswrapper[4765]: E1210 06:49:58.589054 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:49:59 crc kubenswrapper[4765]: I1210 06:49:59.588608 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:49:59 crc kubenswrapper[4765]: E1210 06:49:59.588732 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:49:59 crc kubenswrapper[4765]: I1210 06:49:59.588613 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:49:59 crc kubenswrapper[4765]: E1210 06:49:59.588983 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:50:00 crc kubenswrapper[4765]: E1210 06:50:00.548509 4765 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 10 06:50:00 crc kubenswrapper[4765]: I1210 06:50:00.588331 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:50:00 crc kubenswrapper[4765]: I1210 06:50:00.588399 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:50:00 crc kubenswrapper[4765]: E1210 06:50:00.589484 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:50:00 crc kubenswrapper[4765]: E1210 06:50:00.589573 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:50:00 crc kubenswrapper[4765]: E1210 06:50:00.665641 4765 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 10 06:50:01 crc kubenswrapper[4765]: I1210 06:50:01.588811 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:50:01 crc kubenswrapper[4765]: I1210 06:50:01.588856 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:01 crc kubenswrapper[4765]: E1210 06:50:01.588966 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:50:01 crc kubenswrapper[4765]: E1210 06:50:01.589142 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:50:02 crc kubenswrapper[4765]: I1210 06:50:02.588906 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:50:02 crc kubenswrapper[4765]: E1210 06:50:02.589049 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:50:02 crc kubenswrapper[4765]: I1210 06:50:02.588921 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:50:02 crc kubenswrapper[4765]: E1210 06:50:02.589268 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:50:03 crc kubenswrapper[4765]: I1210 06:50:03.588405 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:50:03 crc kubenswrapper[4765]: I1210 06:50:03.588465 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:03 crc kubenswrapper[4765]: E1210 06:50:03.588552 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:50:03 crc kubenswrapper[4765]: E1210 06:50:03.588632 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:50:04 crc kubenswrapper[4765]: I1210 06:50:04.588421 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:50:04 crc kubenswrapper[4765]: E1210 06:50:04.588561 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:50:04 crc kubenswrapper[4765]: I1210 06:50:04.588657 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:50:04 crc kubenswrapper[4765]: E1210 06:50:04.588806 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:50:05 crc kubenswrapper[4765]: I1210 06:50:05.588613 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:50:05 crc kubenswrapper[4765]: I1210 06:50:05.588651 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:05 crc kubenswrapper[4765]: E1210 06:50:05.588765 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:50:05 crc kubenswrapper[4765]: E1210 06:50:05.588881 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:50:05 crc kubenswrapper[4765]: E1210 06:50:05.666992 4765 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 10 06:50:06 crc kubenswrapper[4765]: I1210 06:50:06.588688 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:50:06 crc kubenswrapper[4765]: E1210 06:50:06.588817 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:50:06 crc kubenswrapper[4765]: I1210 06:50:06.589703 4765 scope.go:117] "RemoveContainer" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" Dec 10 06:50:06 crc kubenswrapper[4765]: I1210 06:50:06.590016 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:50:06 crc kubenswrapper[4765]: E1210 06:50:06.590097 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:50:06 crc kubenswrapper[4765]: I1210 06:50:06.590392 4765 scope.go:117] "RemoveContainer" containerID="1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae" Dec 10 06:50:07 crc kubenswrapper[4765]: I1210 06:50:07.103784 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/3.log" Dec 10 06:50:07 crc kubenswrapper[4765]: I1210 06:50:07.106916 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerStarted","Data":"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495"} Dec 10 06:50:07 crc kubenswrapper[4765]: I1210 06:50:07.107307 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:50:07 crc kubenswrapper[4765]: I1210 06:50:07.112199 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r78vd_d5227381-9852-49ce-96f1-220c42aab12a/kube-multus/1.log" Dec 10 06:50:07 crc kubenswrapper[4765]: I1210 06:50:07.114569 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r78vd" event={"ID":"d5227381-9852-49ce-96f1-220c42aab12a","Type":"ContainerStarted","Data":"b68f4e569b7eeef5928122abf99162669328f71c395b2c29915061eb90e372b3"} Dec 10 06:50:07 crc kubenswrapper[4765]: I1210 06:50:07.140994 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podStartSLOduration=107.140973116 podStartE2EDuration="1m47.140973116s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:07.139612477 +0000 UTC m=+126.866277793" watchObservedRunningTime="2025-12-10 06:50:07.140973116 +0000 UTC m=+126.867638432" Dec 10 06:50:07 crc kubenswrapper[4765]: I1210 06:50:07.588720 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:50:07 crc kubenswrapper[4765]: E1210 06:50:07.589056 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:50:07 crc kubenswrapper[4765]: I1210 06:50:07.588731 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:07 crc kubenswrapper[4765]: E1210 06:50:07.589336 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:50:07 crc kubenswrapper[4765]: I1210 06:50:07.682068 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-k9sld"] Dec 10 06:50:08 crc kubenswrapper[4765]: I1210 06:50:08.117880 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:50:08 crc kubenswrapper[4765]: E1210 06:50:08.117987 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:50:08 crc kubenswrapper[4765]: I1210 06:50:08.588677 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:50:08 crc kubenswrapper[4765]: E1210 06:50:08.588863 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:50:08 crc kubenswrapper[4765]: I1210 06:50:08.589034 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:50:08 crc kubenswrapper[4765]: E1210 06:50:08.589117 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:50:09 crc kubenswrapper[4765]: I1210 06:50:09.588720 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:50:09 crc kubenswrapper[4765]: I1210 06:50:09.588841 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:09 crc kubenswrapper[4765]: E1210 06:50:09.589058 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 06:50:09 crc kubenswrapper[4765]: E1210 06:50:09.588857 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k9sld" podUID="efb71311-50ec-4765-8caf-6f2e02b8dce9" Dec 10 06:50:10 crc kubenswrapper[4765]: I1210 06:50:10.588197 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:50:10 crc kubenswrapper[4765]: I1210 06:50:10.588361 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:50:10 crc kubenswrapper[4765]: E1210 06:50:10.589405 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 06:50:10 crc kubenswrapper[4765]: E1210 06:50:10.589525 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 06:50:11 crc kubenswrapper[4765]: I1210 06:50:11.588147 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:11 crc kubenswrapper[4765]: I1210 06:50:11.588155 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:50:11 crc kubenswrapper[4765]: I1210 06:50:11.590407 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 10 06:50:11 crc kubenswrapper[4765]: I1210 06:50:11.590579 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 10 06:50:11 crc kubenswrapper[4765]: I1210 06:50:11.590621 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 10 06:50:11 crc kubenswrapper[4765]: I1210 06:50:11.590891 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 10 06:50:12 crc kubenswrapper[4765]: I1210 06:50:12.588925 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:50:12 crc kubenswrapper[4765]: I1210 06:50:12.588925 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:50:12 crc kubenswrapper[4765]: I1210 06:50:12.590613 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 10 06:50:12 crc kubenswrapper[4765]: I1210 06:50:12.590736 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.101195 4765 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.141560 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-gvn7z"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.141979 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9rf4c"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.142226 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.142454 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.144024 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.144552 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.148956 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f6lqh"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.149335 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-cq24j"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.149525 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-44knz"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.149850 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.150563 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.150822 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.152200 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.152741 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.153015 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.153367 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.153627 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-fv6zj"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.153723 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.153791 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.153956 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.154034 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.154157 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.154270 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.154390 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.154541 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.154604 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.155017 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.155393 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.156051 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.156167 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.156673 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.156735 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.156764 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.156838 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.156972 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.157186 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.157363 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.157495 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.157597 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.157649 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.157693 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.157737 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.157649 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.162609 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.163333 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.163664 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.170028 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.174852 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.197046 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.197341 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-49sgb"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.197774 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-7m9zf"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.197941 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.198084 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.198342 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.198884 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ljp6b"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.199161 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.199286 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.199533 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.199829 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.200039 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.202594 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.207264 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-lxxm6"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.207719 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-cq24j"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.207741 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9rf4c"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.207824 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-lxxm6" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.209659 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.210849 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.211029 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.211239 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.211353 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.211464 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.211503 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.211371 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.214132 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.214548 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.214773 4765 
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.214936 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.214990 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.214885 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.214897 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.219736 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.219885 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.220042 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.220381 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.220516 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.220602 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.220704 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.220804 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.222097 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.222567 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.222678 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.222868 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.222952 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.223061 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.223271 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.222872 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.222878 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.223727 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-gvn7z"]
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.223774 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.223819 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.223875 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.224269 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.224426 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.224559 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.224668 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wjlkl"]
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.225176 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.226136 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-qsz8j"]
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.226790 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.226836 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr"]
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229084 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229079 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229201 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229436 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-config\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229465 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229517 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-etcd-serving-ca\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229534 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6d03cd7a-f608-45b5-901c-b01678e4b69a-node-pullsecrets\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229548 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6d03cd7a-f608-45b5-901c-b01678e4b69a-encryption-config\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229564 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-config\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d"
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229583 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r5g8\" (UniqueName: \"kubernetes.io/projected/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-kube-api-access-2r5g8\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c"
\"kube-api-access-2r5g8\" (UniqueName: \"kubernetes.io/projected/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-kube-api-access-2r5g8\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229597 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229598 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-serving-cert\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229681 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229698 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-client-ca\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229712 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d03cd7a-f608-45b5-901c-b01678e4b69a-audit-dir\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229729 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-audit\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229742 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d03cd7a-f608-45b5-901c-b01678e4b69a-serving-cert\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229760 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-config\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229775 4765 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5s6r\" (UniqueName: \"kubernetes.io/projected/6d03cd7a-f608-45b5-901c-b01678e4b69a-kube-api-access-c5s6r\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229792 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc2hr\" (UniqueName: \"kubernetes.io/projected/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-kube-api-access-gc2hr\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229807 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6d03cd7a-f608-45b5-901c-b01678e4b69a-etcd-client\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229827 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-image-import-ca\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.229848 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-images\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.230492 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.230711 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231124 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231221 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231305 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231704 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231704 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231732 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" 
Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231746 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231704 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231838 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231889 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231905 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231916 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231933 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.231970 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.232005 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.232028 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.232058 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.232129 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.232138 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.232238 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-svfhs"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.232711 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.232824 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-d8jqz"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.233231 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.233406 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.233500 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.233651 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.235736 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.235949 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.253193 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.287693 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.289417 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.290686 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.291392 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.292214 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.293291 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.294679 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-tzv7k"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.295089 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.295262 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.295514 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.298718 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.303641 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.303977 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.304319 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.304842 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.296470 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.305153 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.305325 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.297936 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.305750 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.306149 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.306707 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.308307 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.308756 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.310586 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.309789 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.310909 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-tcrlf"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.317143 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6shb9"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.318174 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f6lqh"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.318212 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-49sgb"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.318229 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ljp6b"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.318245 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bkcmk"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.318314 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.318672 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-tcrlf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.318718 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.318988 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.319484 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.320559 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.322255 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.323851 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.327721 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7p95x"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.327943 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.331677 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5c2j\" (UniqueName: \"kubernetes.io/projected/82306cc4-ec57-498a-8481-5832db533206-kube-api-access-l5c2j\") pod \"openshift-config-operator-7777fb866f-49sgb\" (UID: \"82306cc4-ec57-498a-8481-5832db533206\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.331715 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whzf2\" (UniqueName: \"kubernetes.io/projected/3ce9501a-7d19-42bf-94fc-b63427ef3c12-kube-api-access-whzf2\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.331776 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47211fde-bde0-457f-8336-c46af9d7ee00-config\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.331811 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5s6r\" (UniqueName: \"kubernetes.io/projected/6d03cd7a-f608-45b5-901c-b01678e4b69a-kube-api-access-c5s6r\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.331842 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc2hr\" (UniqueName: \"kubernetes.io/projected/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-kube-api-access-gc2hr\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.331864 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc4688ee-1a54-48c3-b328-855143ddfe38-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-8slv4\" (UID: \"cc4688ee-1a54-48c3-b328-855143ddfe38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.331889 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6d03cd7a-f608-45b5-901c-b01678e4b69a-etcd-client\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.331909 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-image-import-ca\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc 
kubenswrapper[4765]: I1210 06:50:15.331932 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdrd4\" (UniqueName: \"kubernetes.io/projected/cec358c7-a361-4654-9800-6e275a41c909-kube-api-access-pdrd4\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.331954 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/468222f3-9fe0-4909-b72b-ec659ca3908e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.331982 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1ec79a4-45cc-43dd-883e-c6623922eff0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.332014 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjqws\" (UniqueName: \"kubernetes.io/projected/c1ec79a4-45cc-43dd-883e-c6623922eff0-kube-api-access-xjqws\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.332052 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/468222f3-9fe0-4909-b72b-ec659ca3908e-images\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.332088 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47211fde-bde0-457f-8336-c46af9d7ee00-trusted-ca\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.332178 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.332209 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-images\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 
06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.332234 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cec358c7-a361-4654-9800-6e275a41c909-auth-proxy-config\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.332341 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1ec79a4-45cc-43dd-883e-c6623922eff0-service-ca-bundle\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.333029 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.333116 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3ce9501a-7d19-42bf-94fc-b63427ef3c12-default-certificate\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.333200 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ec79a4-45cc-43dd-883e-c6623922eff0-config\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.333911 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-images\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.333959 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-image-import-ca\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334489 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/301fe92b-b1fa-46ce-a9f1-97c23550e4b9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b62zd\" (UID: \"301fe92b-b1fa-46ce-a9f1-97c23550e4b9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334529 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/cec358c7-a361-4654-9800-6e275a41c909-machine-approver-tls\") pod \"machine-approver-56656f9798-44knz\" (UID: 
\"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334552 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8xpw\" (UniqueName: \"kubernetes.io/projected/468222f3-9fe0-4909-b72b-ec659ca3908e-kube-api-access-t8xpw\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334577 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-config\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334610 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-config\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334638 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ce9501a-7d19-42bf-94fc-b63427ef3c12-metrics-certs\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334679 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334734 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82306cc4-ec57-498a-8481-5832db533206-serving-cert\") pod \"openshift-config-operator-7777fb866f-49sgb\" (UID: \"82306cc4-ec57-498a-8481-5832db533206\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334772 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40915631-1b59-4783-8633-88ea61ed4814-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-rz6tg\" (UID: \"40915631-1b59-4783-8633-88ea61ed4814\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334821 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/468222f3-9fe0-4909-b72b-ec659ca3908e-proxy-tls\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334865 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-etcd-serving-ca\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334911 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40915631-1b59-4783-8633-88ea61ed4814-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-rz6tg\" (UID: \"40915631-1b59-4783-8633-88ea61ed4814\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334957 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-client-ca\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.334981 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3ce9501a-7d19-42bf-94fc-b63427ef3c12-stats-auth\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.335007 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6d03cd7a-f608-45b5-901c-b01678e4b69a-node-pullsecrets\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.335033 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6d03cd7a-f608-45b5-901c-b01678e4b69a-encryption-config\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.335057 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cec358c7-a361-4654-9800-6e275a41c909-config\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.335083 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47211fde-bde0-457f-8336-c46af9d7ee00-serving-cert\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.335135 4765 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/412763c1-3872-4843-9454-988bfa904c52-srv-cert\") pod \"catalog-operator-68c6474976-gzldr\" (UID: \"412763c1-3872-4843-9454-988bfa904c52\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.335169 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-config\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.335196 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gglrh\" (UniqueName: \"kubernetes.io/projected/7b597651-9700-4c4b-9d2d-c21dc37c1959-kube-api-access-gglrh\") pod \"downloads-7954f5f757-lxxm6\" (UID: \"7b597651-9700-4c4b-9d2d-c21dc37c1959\") " pod="openshift-console/downloads-7954f5f757-lxxm6" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.336285 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/82306cc4-ec57-498a-8481-5832db533206-available-featuregates\") pod \"openshift-config-operator-7777fb866f-49sgb\" (UID: \"82306cc4-ec57-498a-8481-5832db533206\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.336659 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.336647 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ce9501a-7d19-42bf-94fc-b63427ef3c12-service-ca-bundle\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.337157 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-serving-cert\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.337699 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r5g8\" (UniqueName: \"kubernetes.io/projected/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-kube-api-access-2r5g8\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.337755 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cc4688ee-1a54-48c3-b328-855143ddfe38-config\") pod \"openshift-apiserver-operator-796bbdcf4f-8slv4\" (UID: \"cc4688ee-1a54-48c3-b328-855143ddfe38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.337782 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-serving-cert\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.337807 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/412763c1-3872-4843-9454-988bfa904c52-profile-collector-cert\") pod \"catalog-operator-68c6474976-gzldr\" (UID: \"412763c1-3872-4843-9454-988bfa904c52\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.337857 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-etcd-serving-ca\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.338494 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6d03cd7a-f608-45b5-901c-b01678e4b69a-node-pullsecrets\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.338615 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txgg6\" (UniqueName: \"kubernetes.io/projected/cc4688ee-1a54-48c3-b328-855143ddfe38-kube-api-access-txgg6\") pod \"openshift-apiserver-operator-796bbdcf4f-8slv4\" (UID: \"cc4688ee-1a54-48c3-b328-855143ddfe38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.338648 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.338686 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-client-ca\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.338706 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d03cd7a-f608-45b5-901c-b01678e4b69a-audit-dir\") pod \"apiserver-76f77b778f-gvn7z\" 
(UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.338727 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gq8rh\" (UniqueName: \"kubernetes.io/projected/301fe92b-b1fa-46ce-a9f1-97c23550e4b9-kube-api-access-gq8rh\") pod \"cluster-samples-operator-665b6dd947-b62zd\" (UID: \"301fe92b-b1fa-46ce-a9f1-97c23550e4b9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.338763 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8tvj\" (UniqueName: \"kubernetes.io/projected/47211fde-bde0-457f-8336-c46af9d7ee00-kube-api-access-m8tvj\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.338981 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-audit\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.339009 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d03cd7a-f608-45b5-901c-b01678e4b69a-serving-cert\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.339027 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8xtw\" (UniqueName: \"kubernetes.io/projected/412763c1-3872-4843-9454-988bfa904c52-kube-api-access-m8xtw\") pod \"catalog-operator-68c6474976-gzldr\" (UID: \"412763c1-3872-4843-9454-988bfa904c52\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.339046 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-config\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.339065 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gzqw\" (UniqueName: \"kubernetes.io/projected/40915631-1b59-4783-8633-88ea61ed4814-kube-api-access-9gzqw\") pod \"openshift-controller-manager-operator-756b6f6bc6-rz6tg\" (UID: \"40915631-1b59-4783-8633-88ea61ed4814\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.339082 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1ec79a4-45cc-43dd-883e-c6623922eff0-serving-cert\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.339100 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcnkb\" (UniqueName: \"kubernetes.io/projected/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-kube-api-access-kcnkb\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.339227 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d03cd7a-f608-45b5-901c-b01678e4b69a-audit-dir\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.339992 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-config\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.340135 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-client-ca\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.340460 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-config\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.340572 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6d03cd7a-f608-45b5-901c-b01678e4b69a-etcd-client\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.340619 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6d03cd7a-f608-45b5-901c-b01678e4b69a-audit\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.340652 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-7m9zf"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.340689 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.340709 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.340767 4765 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.341180 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-config\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.341244 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.341445 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.342456 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.342469 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.343116 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-serving-cert\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.343324 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.343477 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.343967 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.344337 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6d03cd7a-f608-45b5-901c-b01678e4b69a-encryption-config\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.345227 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.345434 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.346857 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-fv6zj"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.347970 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.349658 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d03cd7a-f608-45b5-901c-b01678e4b69a-serving-cert\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.350162 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-d8jqz"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.351322 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-qsz8j"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.352436 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.353457 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.354497 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-lxxm6"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.355482 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wjlkl"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.358161 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.361438 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.362757 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.363964 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-4xcxz"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.364684 4765 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-4xcxz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.365392 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-ns797"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.365778 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.366945 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-ns797" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.366968 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.368152 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.369350 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.370342 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.371882 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-tzv7k"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.373154 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.374430 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.375688 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.376742 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6shb9"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.377942 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.379231 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bkcmk"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.380270 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.381679 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.383637 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.385030 4765 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.385628 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7p95x"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.386922 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ns797"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.388386 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-4xcxz"] Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.405674 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.426065 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.440134 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40915631-1b59-4783-8633-88ea61ed4814-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-rz6tg\" (UID: \"40915631-1b59-4783-8633-88ea61ed4814\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.441116 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40915631-1b59-4783-8633-88ea61ed4814-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-rz6tg\" (UID: \"40915631-1b59-4783-8633-88ea61ed4814\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.440200 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/468222f3-9fe0-4909-b72b-ec659ca3908e-proxy-tls\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.441194 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-client-ca\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.441220 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cec358c7-a361-4654-9800-6e275a41c909-config\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.441240 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3ce9501a-7d19-42bf-94fc-b63427ef3c12-stats-auth\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc 
kubenswrapper[4765]: I1210 06:50:15.441284 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47211fde-bde0-457f-8336-c46af9d7ee00-serving-cert\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.441305 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/412763c1-3872-4843-9454-988bfa904c52-srv-cert\") pod \"catalog-operator-68c6474976-gzldr\" (UID: \"412763c1-3872-4843-9454-988bfa904c52\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.441928 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gglrh\" (UniqueName: \"kubernetes.io/projected/7b597651-9700-4c4b-9d2d-c21dc37c1959-kube-api-access-gglrh\") pod \"downloads-7954f5f757-lxxm6\" (UID: \"7b597651-9700-4c4b-9d2d-c21dc37c1959\") " pod="openshift-console/downloads-7954f5f757-lxxm6" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.441963 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/82306cc4-ec57-498a-8481-5832db533206-available-featuregates\") pod \"openshift-config-operator-7777fb866f-49sgb\" (UID: \"82306cc4-ec57-498a-8481-5832db533206\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442011 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ce9501a-7d19-42bf-94fc-b63427ef3c12-service-ca-bundle\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442049 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc4688ee-1a54-48c3-b328-855143ddfe38-config\") pod \"openshift-apiserver-operator-796bbdcf4f-8slv4\" (UID: \"cc4688ee-1a54-48c3-b328-855143ddfe38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442105 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-serving-cert\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442118 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cec358c7-a361-4654-9800-6e275a41c909-config\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442135 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/412763c1-3872-4843-9454-988bfa904c52-profile-collector-cert\") pod \"catalog-operator-68c6474976-gzldr\" (UID: \"412763c1-3872-4843-9454-988bfa904c52\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442216 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txgg6\" (UniqueName: \"kubernetes.io/projected/cc4688ee-1a54-48c3-b328-855143ddfe38-kube-api-access-txgg6\") pod \"openshift-apiserver-operator-796bbdcf4f-8slv4\" (UID: \"cc4688ee-1a54-48c3-b328-855143ddfe38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442264 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gq8rh\" (UniqueName: \"kubernetes.io/projected/301fe92b-b1fa-46ce-a9f1-97c23550e4b9-kube-api-access-gq8rh\") pod \"cluster-samples-operator-665b6dd947-b62zd\" (UID: \"301fe92b-b1fa-46ce-a9f1-97c23550e4b9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442290 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8tvj\" (UniqueName: \"kubernetes.io/projected/47211fde-bde0-457f-8336-c46af9d7ee00-kube-api-access-m8tvj\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442338 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8xtw\" (UniqueName: \"kubernetes.io/projected/412763c1-3872-4843-9454-988bfa904c52-kube-api-access-m8xtw\") pod \"catalog-operator-68c6474976-gzldr\" (UID: \"412763c1-3872-4843-9454-988bfa904c52\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442380 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gzqw\" (UniqueName: \"kubernetes.io/projected/40915631-1b59-4783-8633-88ea61ed4814-kube-api-access-9gzqw\") pod \"openshift-controller-manager-operator-756b6f6bc6-rz6tg\" (UID: \"40915631-1b59-4783-8633-88ea61ed4814\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442423 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1ec79a4-45cc-43dd-883e-c6623922eff0-serving-cert\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442502 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/82306cc4-ec57-498a-8481-5832db533206-available-featuregates\") pod \"openshift-config-operator-7777fb866f-49sgb\" (UID: \"82306cc4-ec57-498a-8481-5832db533206\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442511 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-client-ca\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442526 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcnkb\" (UniqueName: \"kubernetes.io/projected/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-kube-api-access-kcnkb\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442612 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5c2j\" (UniqueName: \"kubernetes.io/projected/82306cc4-ec57-498a-8481-5832db533206-kube-api-access-l5c2j\") pod \"openshift-config-operator-7777fb866f-49sgb\" (UID: \"82306cc4-ec57-498a-8481-5832db533206\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442640 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whzf2\" (UniqueName: \"kubernetes.io/projected/3ce9501a-7d19-42bf-94fc-b63427ef3c12-kube-api-access-whzf2\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442667 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47211fde-bde0-457f-8336-c46af9d7ee00-config\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442700 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc4688ee-1a54-48c3-b328-855143ddfe38-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-8slv4\" (UID: \"cc4688ee-1a54-48c3-b328-855143ddfe38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442737 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdrd4\" (UniqueName: \"kubernetes.io/projected/cec358c7-a361-4654-9800-6e275a41c909-kube-api-access-pdrd4\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442757 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/468222f3-9fe0-4909-b72b-ec659ca3908e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442782 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1ec79a4-45cc-43dd-883e-c6623922eff0-trusted-ca-bundle\") pod 
\"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442803 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjqws\" (UniqueName: \"kubernetes.io/projected/c1ec79a4-45cc-43dd-883e-c6623922eff0-kube-api-access-xjqws\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442820 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/468222f3-9fe0-4909-b72b-ec659ca3908e-images\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442841 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442860 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47211fde-bde0-457f-8336-c46af9d7ee00-trusted-ca\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442877 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cec358c7-a361-4654-9800-6e275a41c909-auth-proxy-config\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442897 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1ec79a4-45cc-43dd-883e-c6623922eff0-service-ca-bundle\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442915 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3ce9501a-7d19-42bf-94fc-b63427ef3c12-default-certificate\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442937 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ec79a4-45cc-43dd-883e-c6623922eff0-config\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442955 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/cec358c7-a361-4654-9800-6e275a41c909-machine-approver-tls\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442972 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8xpw\" (UniqueName: \"kubernetes.io/projected/468222f3-9fe0-4909-b72b-ec659ca3908e-kube-api-access-t8xpw\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.442988 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-config\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.443007 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/301fe92b-b1fa-46ce-a9f1-97c23550e4b9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b62zd\" (UID: \"301fe92b-b1fa-46ce-a9f1-97c23550e4b9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.443026 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ce9501a-7d19-42bf-94fc-b63427ef3c12-metrics-certs\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.443064 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc4688ee-1a54-48c3-b328-855143ddfe38-config\") pod \"openshift-apiserver-operator-796bbdcf4f-8slv4\" (UID: \"cc4688ee-1a54-48c3-b328-855143ddfe38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.443572 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47211fde-bde0-457f-8336-c46af9d7ee00-config\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.443074 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82306cc4-ec57-498a-8481-5832db533206-serving-cert\") pod \"openshift-config-operator-7777fb866f-49sgb\" (UID: \"82306cc4-ec57-498a-8481-5832db533206\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.443635 4765 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40915631-1b59-4783-8633-88ea61ed4814-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-rz6tg\" (UID: \"40915631-1b59-4783-8633-88ea61ed4814\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.444607 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ec79a4-45cc-43dd-883e-c6623922eff0-config\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.444702 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.444994 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1ec79a4-45cc-43dd-883e-c6623922eff0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.445259 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c1ec79a4-45cc-43dd-883e-c6623922eff0-service-ca-bundle\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.445389 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/468222f3-9fe0-4909-b72b-ec659ca3908e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.445418 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cec358c7-a361-4654-9800-6e275a41c909-auth-proxy-config\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.445633 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47211fde-bde0-457f-8336-c46af9d7ee00-trusted-ca\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.446071 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 
06:50:15.446480 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47211fde-bde0-457f-8336-c46af9d7ee00-serving-cert\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.447309 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/412763c1-3872-4843-9454-988bfa904c52-srv-cert\") pod \"catalog-operator-68c6474976-gzldr\" (UID: \"412763c1-3872-4843-9454-988bfa904c52\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.448039 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc4688ee-1a54-48c3-b328-855143ddfe38-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-8slv4\" (UID: \"cc4688ee-1a54-48c3-b328-855143ddfe38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.448351 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/cec358c7-a361-4654-9800-6e275a41c909-machine-approver-tls\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.448905 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40915631-1b59-4783-8633-88ea61ed4814-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-rz6tg\" (UID: \"40915631-1b59-4783-8633-88ea61ed4814\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.449233 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82306cc4-ec57-498a-8481-5832db533206-serving-cert\") pod \"openshift-config-operator-7777fb866f-49sgb\" (UID: \"82306cc4-ec57-498a-8481-5832db533206\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.449295 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1ec79a4-45cc-43dd-883e-c6623922eff0-serving-cert\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.454237 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-config\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.454756 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/301fe92b-b1fa-46ce-a9f1-97c23550e4b9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b62zd\" (UID: \"301fe92b-b1fa-46ce-a9f1-97c23550e4b9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.455546 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/412763c1-3872-4843-9454-988bfa904c52-profile-collector-cert\") pod \"catalog-operator-68c6474976-gzldr\" (UID: \"412763c1-3872-4843-9454-988bfa904c52\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.456518 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-serving-cert\") pod \"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.466184 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.485552 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.505686 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.525908 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.547372 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.566588 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.577164 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3ce9501a-7d19-42bf-94fc-b63427ef3c12-default-certificate\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.585881 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.594708 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3ce9501a-7d19-42bf-94fc-b63427ef3c12-stats-auth\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.605561 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.617367 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/3ce9501a-7d19-42bf-94fc-b63427ef3c12-metrics-certs\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.625628 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.633625 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ce9501a-7d19-42bf-94fc-b63427ef3c12-service-ca-bundle\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.645989 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.665810 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.675034 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/468222f3-9fe0-4909-b72b-ec659ca3908e-images\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.685317 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.705852 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.713411 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/468222f3-9fe0-4909-b72b-ec659ca3908e-proxy-tls\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.766862 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.786611 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.806188 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.826156 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.845550 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.866776 4765 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"image-registry-operator-tls" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.885458 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.905760 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.926537 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.946123 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.965840 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 10 06:50:15 crc kubenswrapper[4765]: I1210 06:50:15.985908 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.005602 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.026555 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.045931 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.066731 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.085805 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.105839 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.125855 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.145390 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.165855 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.185857 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.206234 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.226238 4765 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.245963 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.265939 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.285716 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.305972 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.324529 4765 request.go:700] Waited for 1.00599153s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca/secrets?fieldSelector=metadata.name%3Dsigning-key&limit=500&resourceVersion=0 Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.326452 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.345115 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.365796 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.387771 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.405798 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.426037 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.445704 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.465667 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.485861 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.506337 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.525908 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.551304 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.565323 4765 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-marketplace"/"kube-root-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.586418 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.606110 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.626665 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.646171 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.666280 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.686940 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.705540 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.739823 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5s6r\" (UniqueName: \"kubernetes.io/projected/6d03cd7a-f608-45b5-901c-b01678e4b69a-kube-api-access-c5s6r\") pod \"apiserver-76f77b778f-gvn7z\" (UID: \"6d03cd7a-f608-45b5-901c-b01678e4b69a\") " pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.777966 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc2hr\" (UniqueName: \"kubernetes.io/projected/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-kube-api-access-gc2hr\") pod \"route-controller-manager-6576b87f9c-w787d\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.786923 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.795825 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r5g8\" (UniqueName: \"kubernetes.io/projected/e33fd3b0-8406-4675-91fc-ed8b1b3e7cef-kube-api-access-2r5g8\") pod \"machine-api-operator-5694c8668f-9rf4c\" (UID: \"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.805499 4765 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.826180 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.846435 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.865704 4765 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.885567 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.906493 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.925495 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.945964 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.968414 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.972564 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 10 06:50:16 crc kubenswrapper[4765]: I1210 06:50:16.985759 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.006433 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.026406 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.050506 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.052540 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.066280 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.085769 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.087362 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.105833 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.124712 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d"] Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.125528 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.146470 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.158145 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" event={"ID":"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f","Type":"ContainerStarted","Data":"966630f6c55c1b58a9f3ad515f47c66096ec89624c4e054cfaa797c27981e070"} Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.169975 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.186109 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.207429 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.208018 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-gvn7z"] Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.225702 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.246697 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.250228 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9rf4c"] Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.285070 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gglrh\" (UniqueName: \"kubernetes.io/projected/7b597651-9700-4c4b-9d2d-c21dc37c1959-kube-api-access-gglrh\") pod \"downloads-7954f5f757-lxxm6\" (UID: \"7b597651-9700-4c4b-9d2d-c21dc37c1959\") " pod="openshift-console/downloads-7954f5f757-lxxm6" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.299812 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8tvj\" (UniqueName: \"kubernetes.io/projected/47211fde-bde0-457f-8336-c46af9d7ee00-kube-api-access-m8tvj\") pod \"console-operator-58897d9998-7m9zf\" (UID: \"47211fde-bde0-457f-8336-c46af9d7ee00\") " pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.319288 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcnkb\" (UniqueName: \"kubernetes.io/projected/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-kube-api-access-kcnkb\") pod 
\"controller-manager-879f6c89f-f6lqh\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.339231 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txgg6\" (UniqueName: \"kubernetes.io/projected/cc4688ee-1a54-48c3-b328-855143ddfe38-kube-api-access-txgg6\") pod \"openshift-apiserver-operator-796bbdcf4f-8slv4\" (UID: \"cc4688ee-1a54-48c3-b328-855143ddfe38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.343749 4765 request.go:700] Waited for 1.900988687s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/serviceaccounts/olm-operator-serviceaccount/token Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.362861 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8xtw\" (UniqueName: \"kubernetes.io/projected/412763c1-3872-4843-9454-988bfa904c52-kube-api-access-m8xtw\") pod \"catalog-operator-68c6474976-gzldr\" (UID: \"412763c1-3872-4843-9454-988bfa904c52\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.380468 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gq8rh\" (UniqueName: \"kubernetes.io/projected/301fe92b-b1fa-46ce-a9f1-97c23550e4b9-kube-api-access-gq8rh\") pod \"cluster-samples-operator-665b6dd947-b62zd\" (UID: \"301fe92b-b1fa-46ce-a9f1-97c23550e4b9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.400201 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whzf2\" (UniqueName: \"kubernetes.io/projected/3ce9501a-7d19-42bf-94fc-b63427ef3c12-kube-api-access-whzf2\") pod \"router-default-5444994796-svfhs\" (UID: \"3ce9501a-7d19-42bf-94fc-b63427ef3c12\") " pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.420767 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gzqw\" (UniqueName: \"kubernetes.io/projected/40915631-1b59-4783-8633-88ea61ed4814-kube-api-access-9gzqw\") pod \"openshift-controller-manager-operator-756b6f6bc6-rz6tg\" (UID: \"40915631-1b59-4783-8633-88ea61ed4814\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.424632 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.440633 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5c2j\" (UniqueName: \"kubernetes.io/projected/82306cc4-ec57-498a-8481-5832db533206-kube-api-access-l5c2j\") pod \"openshift-config-operator-7777fb866f-49sgb\" (UID: \"82306cc4-ec57-498a-8481-5832db533206\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.460328 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdrd4\" (UniqueName: \"kubernetes.io/projected/cec358c7-a361-4654-9800-6e275a41c909-kube-api-access-pdrd4\") pod \"machine-approver-56656f9798-44knz\" (UID: \"cec358c7-a361-4654-9800-6e275a41c909\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.474730 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.484642 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.485410 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjqws\" (UniqueName: \"kubernetes.io/projected/c1ec79a4-45cc-43dd-883e-c6623922eff0-kube-api-access-xjqws\") pod \"authentication-operator-69f744f599-cq24j\" (UID: \"c1ec79a4-45cc-43dd-883e-c6623922eff0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.492775 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.497981 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8xpw\" (UniqueName: \"kubernetes.io/projected/468222f3-9fe0-4909-b72b-ec659ca3908e-kube-api-access-t8xpw\") pod \"machine-config-operator-74547568cd-wpqgk\" (UID: \"468222f3-9fe0-4909-b72b-ec659ca3908e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.499877 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.507890 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.520824 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-lxxm6" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.843963 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.844056 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-tls\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.844127 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:17 crc kubenswrapper[4765]: E1210 06:50:17.844638 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:18.34461102 +0000 UTC m=+138.071276516 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.845554 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.845713 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.847246 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" Dec 10 06:50:17 crc kubenswrapper[4765]: W1210 06:50:17.848431 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode33fd3b0_8406_4675_91fc_ed8b1b3e7cef.slice/crio-e8bd17c93374ec32ac5a190245486746d2209e9f10fe4adc6ced064b571668c7 WatchSource:0}: Error finding container e8bd17c93374ec32ac5a190245486746d2209e9f10fe4adc6ced064b571668c7: Status 404 returned error can't find the container with id e8bd17c93374ec32ac5a190245486746d2209e9f10fe4adc6ced064b571668c7 Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.851262 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.946469 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.946871 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.946924 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-685ww\" (UniqueName: \"kubernetes.io/projected/2d2049d7-de64-4070-959f-8cefd1f15e5d-kube-api-access-685ww\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.946969 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947014 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-config\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947064 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1e22fddb-6aeb-415e-bea6-b591462f42a8-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947136 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/83cb65fc-a542-4331-80d2-2ebccf5d2bff-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947176 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947221 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-trusted-ca-bundle\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947267 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947340 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2lwf\" (UniqueName: \"kubernetes.io/projected/6256c135-6830-47c9-858c-ad896f3cdee8-kube-api-access-b2lwf\") pod \"multus-admission-controller-857f4d67dd-d8jqz\" (UID: \"6256c135-6830-47c9-858c-ad896f3cdee8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947382 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:17 crc kubenswrapper[4765]: E1210 06:50:17.947427 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:18.447388711 +0000 UTC m=+138.174054027 (durationBeforeRetry 500ms). 
Dec 10 06:50:17 crc kubenswrapper[4765]: E1210 06:50:17.947427 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:18.447388711 +0000 UTC m=+138.174054027 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947540 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1e22fddb-6aeb-415e-bea6-b591462f42a8-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947665 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6256c135-6830-47c9-858c-ad896f3cdee8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-d8jqz\" (UID: \"6256c135-6830-47c9-858c-ad896f3cdee8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947708 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947751 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-trusted-ca\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947798 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-serving-cert\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.947939 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qr2x\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-kube-api-access-6qr2x\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948005 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e22fddb-6aeb-415e-bea6-b591462f42a8-serving-cert\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948109 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948324 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/83cb65fc-a542-4331-80d2-2ebccf5d2bff-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948416 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-dir\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948492 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-policies\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948616 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:17 crc kubenswrapper[4765]: E1210 06:50:17.948652 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:18.448630626 +0000 UTC m=+138.175295952 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948694 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948735 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-bound-sa-token\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948768 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2f2fd\" (UniqueName: \"kubernetes.io/projected/2c029930-d67e-4812-a6e1-bb8d9b806655-kube-api-access-2f2fd\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948815 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-tls\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948863 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-oauth-config\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.948911 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-service-ca\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.952155 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.952284 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1e22fddb-6aeb-415e-bea6-b591462f42a8-etcd-client\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.952324 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1e22fddb-6aeb-415e-bea6-b591462f42a8-encryption-config\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.952408 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.952863 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-certificates\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.953015 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.954031 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-tls\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.954282 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1e22fddb-6aeb-415e-bea6-b591462f42a8-audit-dir\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.955458 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-oauth-serving-cert\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.956074 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1e22fddb-6aeb-415e-bea6-b591462f42a8-audit-policies\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.956151 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvk5h\" (UniqueName: \"kubernetes.io/projected/1e22fddb-6aeb-415e-bea6-b591462f42a8-kube-api-access-pvk5h\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.956187 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/22e4de02-d14f-4a69-8a40-87380cd3ed44-metrics-tls\") pod \"dns-operator-744455d44c-qsz8j\" (UID: \"22e4de02-d14f-4a69-8a40-87380cd3ed44\") " pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j"
Dec 10 06:50:17 crc kubenswrapper[4765]: I1210 06:50:17.956214 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbf98\" (UniqueName: \"kubernetes.io/projected/22e4de02-d14f-4a69-8a40-87380cd3ed44-kube-api-access-bbf98\") pod \"dns-operator-744455d44c-qsz8j\" (UID: \"22e4de02-d14f-4a69-8a40-87380cd3ed44\") " pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.057872 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:18 crc kubenswrapper[4765]: E1210 06:50:18.058197 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:18.558176357 +0000 UTC m=+138.284841673 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058462 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-dir\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058494 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d488de1f-84dd-45c5-a21f-16fd53d4e55a-config\") pod \"kube-controller-manager-operator-78b949d7b-2n9v8\" (UID: \"d488de1f-84dd-45c5-a21f-16fd53d4e55a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058539 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-bound-sa-token\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058559 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058579 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d488de1f-84dd-45c5-a21f-16fd53d4e55a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2n9v8\" (UID: \"d488de1f-84dd-45c5-a21f-16fd53d4e55a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058598 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7ffe8020-73ec-4a69-b53f-7be8548df67b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058614 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-etcd-service-ca\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058636 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2f2fd\" (UniqueName: \"kubernetes.io/projected/2c029930-d67e-4812-a6e1-bb8d9b806655-kube-api-access-2f2fd\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058627 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-dir\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058652 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25273950-0e4d-409a-8387-b571c2b15a05-signing-key\") pod \"service-ca-9c57cc56f-6shb9\" (UID: \"25273950-0e4d-409a-8387-b571c2b15a05\") " pod="openshift-service-ca/service-ca-9c57cc56f-6shb9"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058762 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-etcd-client\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058786 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pswj7\" (UniqueName: \"kubernetes.io/projected/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-kube-api-access-pswj7\") pod \"collect-profiles-29422485-p8qrm\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058824 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058846 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfk7f\" (UniqueName: \"kubernetes.io/projected/34ca3d50-d8f9-420b-941c-3d46bd866149-kube-api-access-zfk7f\") pod \"olm-operator-6b444d44fb-nrl2f\" (UID: \"34ca3d50-d8f9-420b-941c-3d46bd866149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058873 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1e22fddb-6aeb-415e-bea6-b591462f42a8-encryption-config\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058893 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fa5d5cf0-2956-4bb1-9372-0f858ff17342-tmpfs\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058913 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058933 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf9a4b55-b2dd-497c-aee6-c2fa241d5b33-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-j692h\" (UID: \"bf9a4b55-b2dd-497c-aee6-c2fa241d5b33\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058964 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25285c06-58d0-4f7c-a04c-ff944a2c7add-config\") pod \"kube-apiserver-operator-766d6c64bb-nrlwx\" (UID: \"25285c06-58d0-4f7c-a04c-ff944a2c7add\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.058995 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-oauth-serving-cert\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059175 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-m9mvq\" (UID: \"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059243 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbf98\" (UniqueName: \"kubernetes.io/projected/22e4de02-d14f-4a69-8a40-87380cd3ed44-kube-api-access-bbf98\") pod \"dns-operator-744455d44c-qsz8j\" (UID: \"22e4de02-d14f-4a69-8a40-87380cd3ed44\") " pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059423 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-685ww\" (UniqueName: \"kubernetes.io/projected/2d2049d7-de64-4070-959f-8cefd1f15e5d-kube-api-access-685ww\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj"
\"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-csi-data-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059569 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-config\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059600 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bfc4f256-0f84-4e7e-8e29-8e196911ee59-certs\") pod \"machine-config-server-tcrlf\" (UID: \"bfc4f256-0f84-4e7e-8e29-8e196911ee59\") " pod="openshift-machine-config-operator/machine-config-server-tcrlf" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059630 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7ffe8020-73ec-4a69-b53f-7be8548df67b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059655 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8c1203d5-c663-4f5a-96b5-ca6b398114bd-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059678 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlzzm\" (UniqueName: \"kubernetes.io/projected/9c657c6a-60d0-4e3f-b70e-c602bb01bf7c-kube-api-access-zlzzm\") pod \"package-server-manager-789f6589d5-s5sr5\" (UID: \"9c657c6a-60d0-4e3f-b70e-c602bb01bf7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059704 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9c657c6a-60d0-4e3f-b70e-c602bb01bf7c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-s5sr5\" (UID: \"9c657c6a-60d0-4e3f-b70e-c602bb01bf7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059740 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-oauth-serving-cert\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059748 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/83cb65fc-a542-4331-80d2-2ebccf5d2bff-ca-trust-extracted\") pod 
\"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059777 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.059904 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.060124 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/83cb65fc-a542-4331-80d2-2ebccf5d2bff-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.060702 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq75m\" (UniqueName: \"kubernetes.io/projected/fa5d5cf0-2956-4bb1-9372-0f858ff17342-kube-api-access-sq75m\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.060729 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-secret-volume\") pod \"collect-profiles-29422485-p8qrm\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.060760 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c1203d5-c663-4f5a-96b5-ca6b398114bd-trusted-ca\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.060782 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a729b53-a69c-4fe8-a888-f61aa0856910-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-p4zdq\" (UID: \"1a729b53-a69c-4fe8-a888-f61aa0856910\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.060824 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-cliconfig\") pod 
\"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.060850 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjszt\" (UniqueName: \"kubernetes.io/projected/8c1203d5-c663-4f5a-96b5-ca6b398114bd-kube-api-access-pjszt\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.060874 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bkcmk\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.060982 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-config\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.061008 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bkcmk\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.061248 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.061360 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8vzq\" (UniqueName: \"kubernetes.io/projected/2b45c80a-66df-4cda-9b80-33101f1f8060-kube-api-access-f8vzq\") pod \"dns-default-ns797\" (UID: \"2b45c80a-66df-4cda-9b80-33101f1f8060\") " pod="openshift-dns/dns-default-ns797" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.061398 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.061431 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-serving-cert\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " 
pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.061495 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lcxv\" (UniqueName: \"kubernetes.io/projected/bfc4f256-0f84-4e7e-8e29-8e196911ee59-kube-api-access-7lcxv\") pod \"machine-config-server-tcrlf\" (UID: \"bfc4f256-0f84-4e7e-8e29-8e196911ee59\") " pod="openshift-machine-config-operator/machine-config-server-tcrlf" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.061596 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxf9t\" (UniqueName: \"kubernetes.io/projected/09f3f071-b7af-4e7f-b459-b5316393be20-kube-api-access-sxf9t\") pod \"ingress-canary-4xcxz\" (UID: \"09f3f071-b7af-4e7f-b459-b5316393be20\") " pod="openshift-ingress-canary/ingress-canary-4xcxz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.061621 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7ffe8020-73ec-4a69-b53f-7be8548df67b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.061647 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25273950-0e4d-409a-8387-b571c2b15a05-signing-cabundle\") pod \"service-ca-9c57cc56f-6shb9\" (UID: \"25273950-0e4d-409a-8387-b571c2b15a05\") " pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.061763 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/25285c06-58d0-4f7c-a04c-ff944a2c7add-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nrlwx\" (UID: \"25285c06-58d0-4f7c-a04c-ff944a2c7add\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062022 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qr2x\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-kube-api-access-6qr2x\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062128 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/25285c06-58d0-4f7c-a04c-ff944a2c7add-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nrlwx\" (UID: \"25285c06-58d0-4f7c-a04c-ff944a2c7add\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062157 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-registration-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " 
pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062205 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjpqv\" (UniqueName: \"kubernetes.io/projected/25273950-0e4d-409a-8387-b571c2b15a05-kube-api-access-pjpqv\") pod \"service-ca-9c57cc56f-6shb9\" (UID: \"25273950-0e4d-409a-8387-b571c2b15a05\") " pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062230 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7-proxy-tls\") pod \"machine-config-controller-84d6567774-m9mvq\" (UID: \"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062289 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09f3f071-b7af-4e7f-b459-b5316393be20-cert\") pod \"ingress-canary-4xcxz\" (UID: \"09f3f071-b7af-4e7f-b459-b5316393be20\") " pod="openshift-ingress-canary/ingress-canary-4xcxz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062313 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/206f5b8b-75ee-44ae-bae5-dfd6bc829b9b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c8b5s\" (UID: \"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062390 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gfzb\" (UniqueName: \"kubernetes.io/projected/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-kube-api-access-9gfzb\") pod \"marketplace-operator-79b997595-bkcmk\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062455 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/83cb65fc-a542-4331-80d2-2ebccf5d2bff-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062585 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqkkd\" (UniqueName: \"kubernetes.io/projected/7ffe8020-73ec-4a69-b53f-7be8548df67b-kube-api-access-hqkkd\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062610 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3719873e-72ba-4b39-a8d5-8f4b3196a96d-config\") pod \"service-ca-operator-777779d784-wmcvd\" (UID: \"3719873e-72ba-4b39-a8d5-8f4b3196a96d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" 
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062631 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-plugins-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.062748 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-policies\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.063623 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.063800 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fa5d5cf0-2956-4bb1-9372-0f858ff17342-apiservice-cert\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.063946 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-policies\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.063873 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.064236 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-oauth-config\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.064294 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-service-ca\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.064533 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1e22fddb-6aeb-415e-bea6-b591462f42a8-etcd-client\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.064594 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-etcd-ca\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.064745 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2b45c80a-66df-4cda-9b80-33101f1f8060-config-volume\") pod \"dns-default-ns797\" (UID: \"2b45c80a-66df-4cda-9b80-33101f1f8060\") " pod="openshift-dns/dns-default-ns797"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.064807 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2b45c80a-66df-4cda-9b80-33101f1f8060-metrics-tls\") pod \"dns-default-ns797\" (UID: \"2b45c80a-66df-4cda-9b80-33101f1f8060\") " pod="openshift-dns/dns-default-ns797"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.064880 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-certificates\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.064937 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.065009 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1e22fddb-6aeb-415e-bea6-b591462f42a8-audit-dir\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.065050 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndqmn\" (UniqueName: \"kubernetes.io/projected/9195e6d4-dee4-4450-9909-aa0d120dc162-kube-api-access-ndqmn\") pod \"migrator-59844c95c7-d48l7\" (UID: \"9195e6d4-dee4-4450-9909-aa0d120dc162\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.065068 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bfc4f256-0f84-4e7e-8e29-8e196911ee59-node-bootstrap-token\") pod \"machine-config-server-tcrlf\" (UID: \"bfc4f256-0f84-4e7e-8e29-8e196911ee59\") " pod="openshift-machine-config-operator/machine-config-server-tcrlf"
Dec 10 06:50:18 crc kubenswrapper[4765]: W1210 06:50:18.065074 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ce9501a_7d19_42bf_94fc_b63427ef3c12.slice/crio-99c711dbf6877709c802d6d41a5a92804abaf14b27b177a2e569f416da497706 WatchSource:0}: Error finding container 99c711dbf6877709c802d6d41a5a92804abaf14b27b177a2e569f416da497706: Status 404 returned error can't find the container with id 99c711dbf6877709c802d6d41a5a92804abaf14b27b177a2e569f416da497706
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.065122 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1e22fddb-6aeb-415e-bea6-b591462f42a8-audit-dir\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.065165 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1e22fddb-6aeb-415e-bea6-b591462f42a8-audit-policies\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.065240 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/206f5b8b-75ee-44ae-bae5-dfd6bc829b9b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c8b5s\" (UID: \"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.065277 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-serving-cert\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066087 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-certificates\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066177 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8c1203d5-c663-4f5a-96b5-ca6b398114bd-metrics-tls\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066210 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvk5h\" (UniqueName: \"kubernetes.io/projected/1e22fddb-6aeb-415e-bea6-b591462f42a8-kube-api-access-pvk5h\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:18 crc
kubenswrapper[4765]: I1210 06:50:18.066241 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/22e4de02-d14f-4a69-8a40-87380cd3ed44-metrics-tls\") pod \"dns-operator-744455d44c-qsz8j\" (UID: \"22e4de02-d14f-4a69-8a40-87380cd3ed44\") " pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066259 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njh2b\" (UniqueName: \"kubernetes.io/projected/bf9a4b55-b2dd-497c-aee6-c2fa241d5b33-kube-api-access-njh2b\") pod \"control-plane-machine-set-operator-78cbb6b69f-j692h\" (UID: \"bf9a4b55-b2dd-497c-aee6-c2fa241d5b33\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066311 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066330 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d488de1f-84dd-45c5-a21f-16fd53d4e55a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2n9v8\" (UID: \"d488de1f-84dd-45c5-a21f-16fd53d4e55a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066372 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cthl4\" (UniqueName: \"kubernetes.io/projected/3719873e-72ba-4b39-a8d5-8f4b3196a96d-kube-api-access-cthl4\") pod \"service-ca-operator-777779d784-wmcvd\" (UID: \"3719873e-72ba-4b39-a8d5-8f4b3196a96d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066401 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/34ca3d50-d8f9-420b-941c-3d46bd866149-srv-cert\") pod \"olm-operator-6b444d44fb-nrl2f\" (UID: \"34ca3d50-d8f9-420b-941c-3d46bd866149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066419 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066474 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc 
kubenswrapper[4765]: I1210 06:50:18.066607 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1e22fddb-6aeb-415e-bea6-b591462f42a8-audit-policies\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.066757 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-service-ca\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.067739 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.067973 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1e22fddb-6aeb-415e-bea6-b591462f42a8-encryption-config\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.067495 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-config\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.068218 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1e22fddb-6aeb-415e-bea6-b591462f42a8-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.068284 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fa5d5cf0-2956-4bb1-9372-0f858ff17342-webhook-cert\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.068324 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpgj4\" (UniqueName: \"kubernetes.io/projected/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-kube-api-access-xpgj4\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.068364 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a729b53-a69c-4fe8-a888-f61aa0856910-config\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-p4zdq\" (UID: \"1a729b53-a69c-4fe8-a888-f61aa0856910\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.068714 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-trusted-ca-bundle\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.068854 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n8z5\" (UniqueName: \"kubernetes.io/projected/1a729b53-a69c-4fe8-a888-f61aa0856910-kube-api-access-4n8z5\") pod \"kube-storage-version-migrator-operator-b67b599dd-p4zdq\" (UID: \"1a729b53-a69c-4fe8-a888-f61aa0856910\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.068891 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1e22fddb-6aeb-415e-bea6-b591462f42a8-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.068956 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.069233 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2lwf\" (UniqueName: \"kubernetes.io/projected/6256c135-6830-47c9-858c-ad896f3cdee8-kube-api-access-b2lwf\") pod \"multus-admission-controller-857f4d67dd-d8jqz\" (UID: \"6256c135-6830-47c9-858c-ad896f3cdee8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.069274 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1e22fddb-6aeb-415e-bea6-b591462f42a8-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.069296 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6256c135-6830-47c9-858c-ad896f3cdee8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-d8jqz\" (UID: \"6256c135-6830-47c9-858c-ad896f3cdee8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.069348 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/34ca3d50-d8f9-420b-941c-3d46bd866149-profile-collector-cert\") pod \"olm-operator-6b444d44fb-nrl2f\" 
(UID: \"34ca3d50-d8f9-420b-941c-3d46bd866149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.069368 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-socket-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.069387 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzgsm\" (UniqueName: \"kubernetes.io/projected/91562083-eeea-41be-8455-e5fb3ff57453-kube-api-access-lzgsm\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.069407 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-trusted-ca\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.070001 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-oauth-config\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.070146 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/83cb65fc-a542-4331-80d2-2ebccf5d2bff-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.070265 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-trusted-ca-bundle\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.070550 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1e22fddb-6aeb-415e-bea6-b591462f42a8-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.070858 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-mountpoint-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.070895 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1e22fddb-6aeb-415e-bea6-b591462f42a8-serving-cert\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.070928 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/206f5b8b-75ee-44ae-bae5-dfd6bc829b9b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c8b5s\" (UID: \"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.071455 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.071547 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj4cq\" (UniqueName: \"kubernetes.io/projected/4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7-kube-api-access-dj4cq\") pod \"machine-config-controller-84d6567774-m9mvq\" (UID: \"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.071613 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-config-volume\") pod \"collect-profiles-29422485-p8qrm\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.071708 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3719873e-72ba-4b39-a8d5-8f4b3196a96d-serving-cert\") pod \"service-ca-operator-777779d784-wmcvd\" (UID: \"3719873e-72ba-4b39-a8d5-8f4b3196a96d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" Dec 10 06:50:18 crc kubenswrapper[4765]: E1210 06:50:18.071821 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:18.571804329 +0000 UTC m=+138.298469855 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.072118 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-trusted-ca\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.072870 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1e22fddb-6aeb-415e-bea6-b591462f42a8-etcd-client\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.073859 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.074261 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-serving-cert\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.076460 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.076948 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.077560 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6256c135-6830-47c9-858c-ad896f3cdee8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-d8jqz\" (UID: \"6256c135-6830-47c9-858c-ad896f3cdee8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.078549 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1e22fddb-6aeb-415e-bea6-b591462f42a8-serving-cert\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.094414 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.098910 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/22e4de02-d14f-4a69-8a40-87380cd3ed44-metrics-tls\") pod \"dns-operator-744455d44c-qsz8j\" (UID: \"22e4de02-d14f-4a69-8a40-87380cd3ed44\") " pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.102010 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.108361 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.124849 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-bound-sa-token\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.125862 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2f2fd\" (UniqueName: \"kubernetes.io/projected/2c029930-d67e-4812-a6e1-bb8d9b806655-kube-api-access-2f2fd\") pod \"oauth-openshift-558db77b4-ljp6b\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") " pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.147612 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbf98\" (UniqueName: \"kubernetes.io/projected/22e4de02-d14f-4a69-8a40-87380cd3ed44-kube-api-access-bbf98\") pod \"dns-operator-744455d44c-qsz8j\" (UID: \"22e4de02-d14f-4a69-8a40-87380cd3ed44\") " pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.168685 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" event={"ID":"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef","Type":"ContainerStarted","Data":"e8bd17c93374ec32ac5a190245486746d2209e9f10fe4adc6ced064b571668c7"} Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.173520 
4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-685ww\" (UniqueName: \"kubernetes.io/projected/2d2049d7-de64-4070-959f-8cefd1f15e5d-kube-api-access-685ww\") pod \"console-f9d7485db-fv6zj\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174041 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174253 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25273950-0e4d-409a-8387-b571c2b15a05-signing-key\") pod \"service-ca-9c57cc56f-6shb9\" (UID: \"25273950-0e4d-409a-8387-b571c2b15a05\") " pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174309 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-etcd-client\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174330 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pswj7\" (UniqueName: \"kubernetes.io/projected/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-kube-api-access-pswj7\") pod \"collect-profiles-29422485-p8qrm\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174356 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfk7f\" (UniqueName: \"kubernetes.io/projected/34ca3d50-d8f9-420b-941c-3d46bd866149-kube-api-access-zfk7f\") pod \"olm-operator-6b444d44fb-nrl2f\" (UID: \"34ca3d50-d8f9-420b-941c-3d46bd866149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174389 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fa5d5cf0-2956-4bb1-9372-0f858ff17342-tmpfs\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174417 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf9a4b55-b2dd-497c-aee6-c2fa241d5b33-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-j692h\" (UID: \"bf9a4b55-b2dd-497c-aee6-c2fa241d5b33\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174441 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25285c06-58d0-4f7c-a04c-ff944a2c7add-config\") pod 
\"kube-apiserver-operator-766d6c64bb-nrlwx\" (UID: \"25285c06-58d0-4f7c-a04c-ff944a2c7add\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174463 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-m9mvq\" (UID: \"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174486 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-csi-data-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174513 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bfc4f256-0f84-4e7e-8e29-8e196911ee59-certs\") pod \"machine-config-server-tcrlf\" (UID: \"bfc4f256-0f84-4e7e-8e29-8e196911ee59\") " pod="openshift-machine-config-operator/machine-config-server-tcrlf" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174534 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7ffe8020-73ec-4a69-b53f-7be8548df67b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174558 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8c1203d5-c663-4f5a-96b5-ca6b398114bd-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174581 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlzzm\" (UniqueName: \"kubernetes.io/projected/9c657c6a-60d0-4e3f-b70e-c602bb01bf7c-kube-api-access-zlzzm\") pod \"package-server-manager-789f6589d5-s5sr5\" (UID: \"9c657c6a-60d0-4e3f-b70e-c602bb01bf7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174604 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9c657c6a-60d0-4e3f-b70e-c602bb01bf7c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-s5sr5\" (UID: \"9c657c6a-60d0-4e3f-b70e-c602bb01bf7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174638 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq75m\" (UniqueName: \"kubernetes.io/projected/fa5d5cf0-2956-4bb1-9372-0f858ff17342-kube-api-access-sq75m\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174662 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-secret-volume\") pod \"collect-profiles-29422485-p8qrm\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174685 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c1203d5-c663-4f5a-96b5-ca6b398114bd-trusted-ca\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.174709 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a729b53-a69c-4fe8-a888-f61aa0856910-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-p4zdq\" (UID: \"1a729b53-a69c-4fe8-a888-f61aa0856910\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.181874 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjszt\" (UniqueName: \"kubernetes.io/projected/8c1203d5-c663-4f5a-96b5-ca6b398114bd-kube-api-access-pjszt\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.181954 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bkcmk\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.181979 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bkcmk\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182017 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8vzq\" (UniqueName: \"kubernetes.io/projected/2b45c80a-66df-4cda-9b80-33101f1f8060-kube-api-access-f8vzq\") pod \"dns-default-ns797\" (UID: \"2b45c80a-66df-4cda-9b80-33101f1f8060\") " pod="openshift-dns/dns-default-ns797" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182052 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lcxv\" (UniqueName: \"kubernetes.io/projected/bfc4f256-0f84-4e7e-8e29-8e196911ee59-kube-api-access-7lcxv\") pod \"machine-config-server-tcrlf\" (UID: \"bfc4f256-0f84-4e7e-8e29-8e196911ee59\") " pod="openshift-machine-config-operator/machine-config-server-tcrlf" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182076 4765 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7ffe8020-73ec-4a69-b53f-7be8548df67b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182129 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25273950-0e4d-409a-8387-b571c2b15a05-signing-cabundle\") pod \"service-ca-9c57cc56f-6shb9\" (UID: \"25273950-0e4d-409a-8387-b571c2b15a05\") " pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182164 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxf9t\" (UniqueName: \"kubernetes.io/projected/09f3f071-b7af-4e7f-b459-b5316393be20-kube-api-access-sxf9t\") pod \"ingress-canary-4xcxz\" (UID: \"09f3f071-b7af-4e7f-b459-b5316393be20\") " pod="openshift-ingress-canary/ingress-canary-4xcxz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182195 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/25285c06-58d0-4f7c-a04c-ff944a2c7add-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nrlwx\" (UID: \"25285c06-58d0-4f7c-a04c-ff944a2c7add\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182220 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjpqv\" (UniqueName: \"kubernetes.io/projected/25273950-0e4d-409a-8387-b571c2b15a05-kube-api-access-pjpqv\") pod \"service-ca-9c57cc56f-6shb9\" (UID: \"25273950-0e4d-409a-8387-b571c2b15a05\") " pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182246 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7-proxy-tls\") pod \"machine-config-controller-84d6567774-m9mvq\" (UID: \"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182285 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/25285c06-58d0-4f7c-a04c-ff944a2c7add-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nrlwx\" (UID: \"25285c06-58d0-4f7c-a04c-ff944a2c7add\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182308 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-registration-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182333 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09f3f071-b7af-4e7f-b459-b5316393be20-cert\") pod 
\"ingress-canary-4xcxz\" (UID: \"09f3f071-b7af-4e7f-b459-b5316393be20\") " pod="openshift-ingress-canary/ingress-canary-4xcxz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182382 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/206f5b8b-75ee-44ae-bae5-dfd6bc829b9b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c8b5s\" (UID: \"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182411 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gfzb\" (UniqueName: \"kubernetes.io/projected/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-kube-api-access-9gfzb\") pod \"marketplace-operator-79b997595-bkcmk\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182439 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqkkd\" (UniqueName: \"kubernetes.io/projected/7ffe8020-73ec-4a69-b53f-7be8548df67b-kube-api-access-hqkkd\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182463 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3719873e-72ba-4b39-a8d5-8f4b3196a96d-config\") pod \"service-ca-operator-777779d784-wmcvd\" (UID: \"3719873e-72ba-4b39-a8d5-8f4b3196a96d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.182491 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-plugins-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.183388 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fa5d5cf0-2956-4bb1-9372-0f858ff17342-apiservice-cert\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.184050 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-etcd-ca\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.184105 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2b45c80a-66df-4cda-9b80-33101f1f8060-config-volume\") pod \"dns-default-ns797\" (UID: \"2b45c80a-66df-4cda-9b80-33101f1f8060\") " pod="openshift-dns/dns-default-ns797" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185173 4765 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2b45c80a-66df-4cda-9b80-33101f1f8060-metrics-tls\") pod \"dns-default-ns797\" (UID: \"2b45c80a-66df-4cda-9b80-33101f1f8060\") " pod="openshift-dns/dns-default-ns797" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185213 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndqmn\" (UniqueName: \"kubernetes.io/projected/9195e6d4-dee4-4450-9909-aa0d120dc162-kube-api-access-ndqmn\") pod \"migrator-59844c95c7-d48l7\" (UID: \"9195e6d4-dee4-4450-9909-aa0d120dc162\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185240 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bfc4f256-0f84-4e7e-8e29-8e196911ee59-node-bootstrap-token\") pod \"machine-config-server-tcrlf\" (UID: \"bfc4f256-0f84-4e7e-8e29-8e196911ee59\") " pod="openshift-machine-config-operator/machine-config-server-tcrlf" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185272 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/206f5b8b-75ee-44ae-bae5-dfd6bc829b9b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c8b5s\" (UID: \"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185295 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-serving-cert\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185327 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8c1203d5-c663-4f5a-96b5-ca6b398114bd-metrics-tls\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185362 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njh2b\" (UniqueName: \"kubernetes.io/projected/bf9a4b55-b2dd-497c-aee6-c2fa241d5b33-kube-api-access-njh2b\") pod \"control-plane-machine-set-operator-78cbb6b69f-j692h\" (UID: \"bf9a4b55-b2dd-497c-aee6-c2fa241d5b33\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185390 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d488de1f-84dd-45c5-a21f-16fd53d4e55a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2n9v8\" (UID: \"d488de1f-84dd-45c5-a21f-16fd53d4e55a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185421 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cthl4\" (UniqueName: \"kubernetes.io/projected/3719873e-72ba-4b39-a8d5-8f4b3196a96d-kube-api-access-cthl4\") pod 
\"service-ca-operator-777779d784-wmcvd\" (UID: \"3719873e-72ba-4b39-a8d5-8f4b3196a96d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185444 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/34ca3d50-d8f9-420b-941c-3d46bd866149-srv-cert\") pod \"olm-operator-6b444d44fb-nrl2f\" (UID: \"34ca3d50-d8f9-420b-941c-3d46bd866149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185470 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-config\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.185492 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fa5d5cf0-2956-4bb1-9372-0f858ff17342-webhook-cert\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.186264 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpgj4\" (UniqueName: \"kubernetes.io/projected/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-kube-api-access-xpgj4\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.186626 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a729b53-a69c-4fe8-a888-f61aa0856910-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-p4zdq\" (UID: \"1a729b53-a69c-4fe8-a888-f61aa0856910\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.186659 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n8z5\" (UniqueName: \"kubernetes.io/projected/1a729b53-a69c-4fe8-a888-f61aa0856910-kube-api-access-4n8z5\") pod \"kube-storage-version-migrator-operator-b67b599dd-p4zdq\" (UID: \"1a729b53-a69c-4fe8-a888-f61aa0856910\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.186757 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/34ca3d50-d8f9-420b-941c-3d46bd866149-profile-collector-cert\") pod \"olm-operator-6b444d44fb-nrl2f\" (UID: \"34ca3d50-d8f9-420b-941c-3d46bd866149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.186784 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-socket-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " 
pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.186987 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzgsm\" (UniqueName: \"kubernetes.io/projected/91562083-eeea-41be-8455-e5fb3ff57453-kube-api-access-lzgsm\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.187025 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-mountpoint-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.187065 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/206f5b8b-75ee-44ae-bae5-dfd6bc829b9b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c8b5s\" (UID: \"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.187132 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj4cq\" (UniqueName: \"kubernetes.io/projected/4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7-kube-api-access-dj4cq\") pod \"machine-config-controller-84d6567774-m9mvq\" (UID: \"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.187155 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-config-volume\") pod \"collect-profiles-29422485-p8qrm\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.187182 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3719873e-72ba-4b39-a8d5-8f4b3196a96d-serving-cert\") pod \"service-ca-operator-777779d784-wmcvd\" (UID: \"3719873e-72ba-4b39-a8d5-8f4b3196a96d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.187216 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d488de1f-84dd-45c5-a21f-16fd53d4e55a-config\") pod \"kube-controller-manager-operator-78b949d7b-2n9v8\" (UID: \"d488de1f-84dd-45c5-a21f-16fd53d4e55a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.187252 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d488de1f-84dd-45c5-a21f-16fd53d4e55a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2n9v8\" (UID: \"d488de1f-84dd-45c5-a21f-16fd53d4e55a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 
06:50:18.187275 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7ffe8020-73ec-4a69-b53f-7be8548df67b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.187313 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-etcd-service-ca\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.188171 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-etcd-service-ca\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.194854 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-csi-data-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.198655 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" event={"ID":"6d03cd7a-f608-45b5-901c-b01678e4b69a","Type":"ContainerStarted","Data":"2f8245acaa69ce464952f03f21db1a98b92095a2923a59299c5147d3cf523571"} Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.201028 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25273950-0e4d-409a-8387-b571c2b15a05-signing-cabundle\") pod \"service-ca-9c57cc56f-6shb9\" (UID: \"25273950-0e4d-409a-8387-b571c2b15a05\") " pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.205150 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qr2x\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-kube-api-access-6qr2x\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.207117 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/25285c06-58d0-4f7c-a04c-ff944a2c7add-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nrlwx\" (UID: \"25285c06-58d0-4f7c-a04c-ff944a2c7add\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.207602 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fa5d5cf0-2956-4bb1-9372-0f858ff17342-tmpfs\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc 
kubenswrapper[4765]: I1210 06:50:18.210907 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25285c06-58d0-4f7c-a04c-ff944a2c7add-config\") pod \"kube-apiserver-operator-766d6c64bb-nrlwx\" (UID: \"25285c06-58d0-4f7c-a04c-ff944a2c7add\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.210993 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bfc4f256-0f84-4e7e-8e29-8e196911ee59-node-bootstrap-token\") pod \"machine-config-server-tcrlf\" (UID: \"bfc4f256-0f84-4e7e-8e29-8e196911ee59\") " pod="openshift-machine-config-operator/machine-config-server-tcrlf" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.211063 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9c657c6a-60d0-4e3f-b70e-c602bb01bf7c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-s5sr5\" (UID: \"9c657c6a-60d0-4e3f-b70e-c602bb01bf7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.211616 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c1203d5-c663-4f5a-96b5-ca6b398114bd-trusted-ca\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.213585 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-mountpoint-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.213854 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-svfhs" event={"ID":"3ce9501a-7d19-42bf-94fc-b63427ef3c12","Type":"ContainerStarted","Data":"99c711dbf6877709c802d6d41a5a92804abaf14b27b177a2e569f416da497706"} Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.214177 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/206f5b8b-75ee-44ae-bae5-dfd6bc829b9b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c8b5s\" (UID: \"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.214367 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bfc4f256-0f84-4e7e-8e29-8e196911ee59-certs\") pod \"machine-config-server-tcrlf\" (UID: \"bfc4f256-0f84-4e7e-8e29-8e196911ee59\") " pod="openshift-machine-config-operator/machine-config-server-tcrlf" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.214838 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-etcd-ca\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " 
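[Editorial sketch] The pairs of lines above show kubelet's volume flow: reconciler_common.go:218 records the desired-state reconciler starting a MountVolume operation, and operation_generator.go:637 records its completion. The UniqueName prefixes (kubernetes.io/configmap, kubernetes.io/secret, kubernetes.io/projected, kubernetes.io/host-path, kubernetes.io/empty-dir) correspond to the volume sources declared on each pod spec; the kube-api-access-* projected volumes carry the service account token. A minimal sketch of how such volumes look when declared, assuming the k8s.io/api/core/v1 types; the ConfigMap/Secret/path names here are illustrative, not taken from the cluster:

// Sketch: declaring volumes analogous to the ones being mounted above.
// Assumes k8s.io/api/core/v1; resource names are hypothetical examples.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	vols := []corev1.Volume{
		// kubernetes.io/configmap/... -> ConfigMap-backed volume.
		{Name: "config", VolumeSource: corev1.VolumeSource{
			ConfigMap: &corev1.ConfigMapVolumeSource{
				LocalObjectReference: corev1.LocalObjectReference{Name: "etcd-operator-config"},
			},
		}},
		// kubernetes.io/secret/... -> Secret-backed volume.
		{Name: "srv-cert", VolumeSource: corev1.VolumeSource{
			Secret: &corev1.SecretVolumeSource{SecretName: "olm-operator-serving-cert"},
		}},
		// kubernetes.io/host-path/... -> hostPath volume, as used by the
		// csi-hostpathplugin pod's socket-dir/mountpoint-dir/plugins-dir.
		{Name: "socket-dir", VolumeSource: corev1.VolumeSource{
			HostPath: &corev1.HostPathVolumeSource{Path: "/var/lib/kubelet/plugins/csi-hostpath"},
		}},
	}
	for _, v := range vols {
		fmt.Println(v.Name) // each name matches a "MountVolume started" line
	}
}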
pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.215775 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3719873e-72ba-4b39-a8d5-8f4b3196a96d-config\") pod \"service-ca-operator-777779d784-wmcvd\" (UID: \"3719873e-72ba-4b39-a8d5-8f4b3196a96d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.215861 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d488de1f-84dd-45c5-a21f-16fd53d4e55a-config\") pod \"kube-controller-manager-operator-78b949d7b-2n9v8\" (UID: \"d488de1f-84dd-45c5-a21f-16fd53d4e55a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.216729 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-config\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.217603 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a729b53-a69c-4fe8-a888-f61aa0856910-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-p4zdq\" (UID: \"1a729b53-a69c-4fe8-a888-f61aa0856910\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.217880 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-plugins-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.220485 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/206f5b8b-75ee-44ae-bae5-dfd6bc829b9b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c8b5s\" (UID: \"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.221697 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf9a4b55-b2dd-497c-aee6-c2fa241d5b33-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-j692h\" (UID: \"bf9a4b55-b2dd-497c-aee6-c2fa241d5b33\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.221716 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvk5h\" (UniqueName: \"kubernetes.io/projected/1e22fddb-6aeb-415e-bea6-b591462f42a8-kube-api-access-pvk5h\") pod \"apiserver-7bbb656c7d-nshv4\" (UID: \"1e22fddb-6aeb-415e-bea6-b591462f42a8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.221945 4765 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-socket-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.221967 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" event={"ID":"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f","Type":"ContainerStarted","Data":"b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934"} Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.222403 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bkcmk\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.222474 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.223304 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" event={"ID":"cec358c7-a361-4654-9800-6e275a41c909","Type":"ContainerStarted","Data":"251b4b11e83c348140f292bafe7d16539c9c36fe8ae57bda23cff4439d82ccb0"} Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.225848 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7ffe8020-73ec-4a69-b53f-7be8548df67b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.225988 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25273950-0e4d-409a-8387-b571c2b15a05-signing-key\") pod \"service-ca-9c57cc56f-6shb9\" (UID: \"25273950-0e4d-409a-8387-b571c2b15a05\") " pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.226116 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/91562083-eeea-41be-8455-e5fb3ff57453-registration-dir\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.229977 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-secret-volume\") pod \"collect-profiles-29422485-p8qrm\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.233187 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd"] Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.236460 4765 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-etcd-client\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.236590 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2b45c80a-66df-4cda-9b80-33101f1f8060-metrics-tls\") pod \"dns-default-ns797\" (UID: \"2b45c80a-66df-4cda-9b80-33101f1f8060\") " pod="openshift-dns/dns-default-ns797" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.236908 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2lwf\" (UniqueName: \"kubernetes.io/projected/6256c135-6830-47c9-858c-ad896f3cdee8-kube-api-access-b2lwf\") pod \"multus-admission-controller-857f4d67dd-d8jqz\" (UID: \"6256c135-6830-47c9-858c-ad896f3cdee8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.237028 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fa5d5cf0-2956-4bb1-9372-0f858ff17342-apiservice-cert\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.237087 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3719873e-72ba-4b39-a8d5-8f4b3196a96d-serving-cert\") pod \"service-ca-operator-777779d784-wmcvd\" (UID: \"3719873e-72ba-4b39-a8d5-8f4b3196a96d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.237243 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fa5d5cf0-2956-4bb1-9372-0f858ff17342-webhook-cert\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.237409 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a729b53-a69c-4fe8-a888-f61aa0856910-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-p4zdq\" (UID: \"1a729b53-a69c-4fe8-a888-f61aa0856910\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.237418 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8c1203d5-c663-4f5a-96b5-ca6b398114bd-metrics-tls\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.237469 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d488de1f-84dd-45c5-a21f-16fd53d4e55a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2n9v8\" (UID: \"d488de1f-84dd-45c5-a21f-16fd53d4e55a\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.237909 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09f3f071-b7af-4e7f-b459-b5316393be20-cert\") pod \"ingress-canary-4xcxz\" (UID: \"09f3f071-b7af-4e7f-b459-b5316393be20\") " pod="openshift-ingress-canary/ingress-canary-4xcxz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.238061 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bkcmk\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.238555 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7ffe8020-73ec-4a69-b53f-7be8548df67b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.268338 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjszt\" (UniqueName: \"kubernetes.io/projected/8c1203d5-c663-4f5a-96b5-ca6b398114bd-kube-api-access-pjszt\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.300140 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7ffe8020-73ec-4a69-b53f-7be8548df67b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.350086 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n8z5\" (UniqueName: \"kubernetes.io/projected/1a729b53-a69c-4fe8-a888-f61aa0856910-kube-api-access-4n8z5\") pod \"kube-storage-version-migrator-operator-b67b599dd-p4zdq\" (UID: \"1a729b53-a69c-4fe8-a888-f61aa0856910\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.361324 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.370472 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.377955 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.387936 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq75m\" (UniqueName: \"kubernetes.io/projected/fa5d5cf0-2956-4bb1-9372-0f858ff17342-kube-api-access-sq75m\") pod \"packageserver-d55dfcdfc-ln8tm\" (UID: \"fa5d5cf0-2956-4bb1-9372-0f858ff17342\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.414412 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.438412 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.455250 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/25285c06-58d0-4f7c-a04c-ff944a2c7add-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nrlwx\" (UID: \"25285c06-58d0-4f7c-a04c-ff944a2c7add\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.460917 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f6lqh"] Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.465377 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8c1203d5-c663-4f5a-96b5-ca6b398114bd-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nqr5d\" (UID: \"8c1203d5-c663-4f5a-96b5-ca6b398114bd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.484927 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-7m9zf"] Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.491315 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.491847 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlzzm\" (UniqueName: \"kubernetes.io/projected/9c657c6a-60d0-4e3f-b70e-c602bb01bf7c-kube-api-access-zlzzm\") pod \"package-server-manager-789f6589d5-s5sr5\" (UID: \"9c657c6a-60d0-4e3f-b70e-c602bb01bf7c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.495983 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-49sgb"] Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.507535 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lcxv\" (UniqueName: \"kubernetes.io/projected/bfc4f256-0f84-4e7e-8e29-8e196911ee59-kube-api-access-7lcxv\") pod \"machine-config-server-tcrlf\" (UID: \"bfc4f256-0f84-4e7e-8e29-8e196911ee59\") " pod="openshift-machine-config-operator/machine-config-server-tcrlf" Dec 10 06:50:18 crc kubenswrapper[4765]: W1210 06:50:18.508599 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0f8b6b6_1a80_4835_b50e_26dc93c985a0.slice/crio-28e96d85fce80b0b262c45aa6f4bed3efe02f9bbc2e2632cd3023d70996ca6b6 WatchSource:0}: Error finding container 28e96d85fce80b0b262c45aa6f4bed3efe02f9bbc2e2632cd3023d70996ca6b6: Status 404 returned error can't find the container with id 28e96d85fce80b0b262c45aa6f4bed3efe02f9bbc2e2632cd3023d70996ca6b6 Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.533309 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8vzq\" (UniqueName: \"kubernetes.io/projected/2b45c80a-66df-4cda-9b80-33101f1f8060-kube-api-access-f8vzq\") pod \"dns-default-ns797\" (UID: \"2b45c80a-66df-4cda-9b80-33101f1f8060\") " pod="openshift-dns/dns-default-ns797" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.544453 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzgsm\" (UniqueName: \"kubernetes.io/projected/91562083-eeea-41be-8455-e5fb3ff57453-kube-api-access-lzgsm\") pod \"csi-hostpathplugin-7p95x\" (UID: \"91562083-eeea-41be-8455-e5fb3ff57453\") " pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.550534 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.642078 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/34ca3d50-d8f9-420b-941c-3d46bd866149-profile-collector-cert\") pod \"olm-operator-6b444d44fb-nrl2f\" (UID: \"34ca3d50-d8f9-420b-941c-3d46bd866149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.642627 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2b45c80a-66df-4cda-9b80-33101f1f8060-config-volume\") pod \"dns-default-ns797\" (UID: \"2b45c80a-66df-4cda-9b80-33101f1f8060\") " pod="openshift-dns/dns-default-ns797" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.643010 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/34ca3d50-d8f9-420b-941c-3d46bd866149-srv-cert\") pod \"olm-operator-6b444d44fb-nrl2f\" (UID: \"34ca3d50-d8f9-420b-941c-3d46bd866149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.643020 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-config-volume\") pod \"collect-profiles-29422485-p8qrm\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.643077 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-m9mvq\" (UID: \"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.646438 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:18 crc kubenswrapper[4765]: E1210 06:50:18.646837 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.146793307 +0000 UTC m=+138.873458623 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.652298 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-serving-cert\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.655708 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxf9t\" (UniqueName: \"kubernetes.io/projected/09f3f071-b7af-4e7f-b459-b5316393be20-kube-api-access-sxf9t\") pod \"ingress-canary-4xcxz\" (UID: \"09f3f071-b7af-4e7f-b459-b5316393be20\") " pod="openshift-ingress-canary/ingress-canary-4xcxz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.658686 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/206f5b8b-75ee-44ae-bae5-dfd6bc829b9b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-c8b5s\" (UID: \"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.659671 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfk7f\" (UniqueName: \"kubernetes.io/projected/34ca3d50-d8f9-420b-941c-3d46bd866149-kube-api-access-zfk7f\") pod \"olm-operator-6b444d44fb-nrl2f\" (UID: \"34ca3d50-d8f9-420b-941c-3d46bd866149\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.660926 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-tcrlf" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.662390 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pswj7\" (UniqueName: \"kubernetes.io/projected/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-kube-api-access-pswj7\") pod \"collect-profiles-29422485-p8qrm\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.665078 4765 util.go:30] "No sandbox for pod can be found. 
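[Editorial sketch] The E1210 error above is the first of a series that repeats through the rest of this section: every mount or unmount touching the kubevirt.io.hostpath-provisioner CSI volume fails because the driver "is not found in the list of registered CSI drivers". A node CSI plugin registers with kubelet by exposing a registration socket under kubelet's plugin registry directory (the registration-dir hostPath mounted into csi-hostpathplugin-7p95x above); until that pod, which is itself still being started in this log, comes up and registers, kubelet cannot obtain a CSI client. A minimal sketch for inspecting which drivers have registered, assuming the usual kubelet default path (an assumption, not taken from this log):

// Sketch: list kubelet's plugin registration sockets to see which CSI
// drivers are registered. regDir is the assumed kubelet default.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const regDir = "/var/lib/kubelet/plugins_registry" // assumed default
	entries, err := os.ReadDir(regDir)
	if err != nil {
		fmt.Fprintln(os.Stderr, "read plugins_registry:", err)
		os.Exit(1)
	}
	for _, e := range entries {
		// Each registered plugin exposes a unix socket named after the
		// driver; an empty directory matches the "not found" errors above.
		fmt.Println(filepath.Join(regDir, e.Name()))
	}
}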
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.665686 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cthl4\" (UniqueName: \"kubernetes.io/projected/3719873e-72ba-4b39-a8d5-8f4b3196a96d-kube-api-access-cthl4\") pod \"service-ca-operator-777779d784-wmcvd\" (UID: \"3719873e-72ba-4b39-a8d5-8f4b3196a96d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.665104 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d488de1f-84dd-45c5-a21f-16fd53d4e55a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2n9v8\" (UID: \"d488de1f-84dd-45c5-a21f-16fd53d4e55a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.670264 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndqmn\" (UniqueName: \"kubernetes.io/projected/9195e6d4-dee4-4450-9909-aa0d120dc162-kube-api-access-ndqmn\") pod \"migrator-59844c95c7-d48l7\" (UID: \"9195e6d4-dee4-4450-9909-aa0d120dc162\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.670324 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqkkd\" (UniqueName: \"kubernetes.io/projected/7ffe8020-73ec-4a69-b53f-7be8548df67b-kube-api-access-hqkkd\") pod \"cluster-image-registry-operator-dc59b4c8b-gzgps\" (UID: \"7ffe8020-73ec-4a69-b53f-7be8548df67b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.673140 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7p95x" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.673718 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr"] Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.675156 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7-proxy-tls\") pod \"machine-config-controller-84d6567774-m9mvq\" (UID: \"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.676874 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4"] Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.677012 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njh2b\" (UniqueName: \"kubernetes.io/projected/bf9a4b55-b2dd-497c-aee6-c2fa241d5b33-kube-api-access-njh2b\") pod \"control-plane-machine-set-operator-78cbb6b69f-j692h\" (UID: \"bf9a4b55-b2dd-497c-aee6-c2fa241d5b33\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.677306 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.677418 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjpqv\" (UniqueName: \"kubernetes.io/projected/25273950-0e4d-409a-8387-b571c2b15a05-kube-api-access-pjpqv\") pod \"service-ca-9c57cc56f-6shb9\" (UID: \"25273950-0e4d-409a-8387-b571c2b15a05\") " pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.677986 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" Dec 10 06:50:18 crc kubenswrapper[4765]: E1210 06:50:18.678072 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.178052683 +0000 UTC m=+138.904717999 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.681572 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj4cq\" (UniqueName: \"kubernetes.io/projected/4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7-kube-api-access-dj4cq\") pod \"machine-config-controller-84d6567774-m9mvq\" (UID: \"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.686909 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gfzb\" (UniqueName: \"kubernetes.io/projected/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-kube-api-access-9gfzb\") pod \"marketplace-operator-79b997595-bkcmk\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.690784 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.692647 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-4xcxz" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.707164 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-ns797" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.707724 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpgj4\" (UniqueName: \"kubernetes.io/projected/3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7-kube-api-access-xpgj4\") pod \"etcd-operator-b45778765-tzv7k\" (UID: \"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.723768 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-cq24j"] Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.743896 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk"] Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.770360 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg"] Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.773516 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.783754 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:18 crc kubenswrapper[4765]: E1210 06:50:18.788099 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.288063797 +0000 UTC m=+139.014729113 (durationBeforeRetry 500ms). 
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.792189 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-lxxm6"]
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.795898 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-qsz8j"]
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.799748 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:18 crc kubenswrapper[4765]: E1210 06:50:18.800326 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.30031131 +0000 UTC m=+139.026976626 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.812710 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ljp6b"]
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.816540 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.825219 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.842050 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.845136 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.859754 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.891591 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.896247 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.902736 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:18 crc kubenswrapper[4765]: E1210 06:50:18.903427 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.4034051 +0000 UTC m=+139.130070426 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.903528 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6shb9"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.911333 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq"]
Dec 10 06:50:18 crc kubenswrapper[4765]: W1210 06:50:18.914527 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22e4de02_d14f_4a69_8a40_87380cd3ed44.slice/crio-a27cf002d0424ab4933aa8e3d982c92beaa5316149a3d2514df1b12ff548df53 WatchSource:0}: Error finding container a27cf002d0424ab4933aa8e3d982c92beaa5316149a3d2514df1b12ff548df53: Status 404 returned error can't find the container with id a27cf002d0424ab4933aa8e3d982c92beaa5316149a3d2514df1b12ff548df53
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.921257 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.928153 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-fv6zj"]
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.928445 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.938624 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm"
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.942928 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"]
Dec 10 06:50:18 crc kubenswrapper[4765]: I1210 06:50:18.953822 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-d8jqz"]
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.005128 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.005453 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.50544027 +0000 UTC m=+139.232105586 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.030339 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5"]
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.106128 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.107385 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.607365407 +0000 UTC m=+139.334030723 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.107610 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.108009 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.607992035 +0000 UTC m=+139.334657351 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:19 crc kubenswrapper[4765]: W1210 06:50:19.132484 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfc4f256_0f84_4e7e_8e29_8e196911ee59.slice/crio-16183dc87f1c44d6bb0cac5eaf953dcc70bf865ce4a9bc8d2fdbc80a4b595de4 WatchSource:0}: Error finding container 16183dc87f1c44d6bb0cac5eaf953dcc70bf865ce4a9bc8d2fdbc80a4b595de4: Status 404 returned error can't find the container with id 16183dc87f1c44d6bb0cac5eaf953dcc70bf865ce4a9bc8d2fdbc80a4b595de4
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.210398 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.210599 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.71057209 +0000 UTC m=+139.437237406 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.210732 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.211156 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.711127756 +0000 UTC m=+139.437793062 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.241172 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" event={"ID":"2c029930-d67e-4812-a6e1-bb8d9b806655","Type":"ContainerStarted","Data":"f0e803afd6f5f15d92a7ce45c14d8c8bc790cad385a29e728c3ccd5a7577274d"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.246791 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-7m9zf" event={"ID":"47211fde-bde0-457f-8336-c46af9d7ee00","Type":"ContainerStarted","Data":"366f471c7d3d89992c8b007e1d9fc6aaa174deaeddc2c6106d615e4d94b2a870"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.249870 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j" event={"ID":"22e4de02-d14f-4a69-8a40-87380cd3ed44","Type":"ContainerStarted","Data":"a27cf002d0424ab4933aa8e3d982c92beaa5316149a3d2514df1b12ff548df53"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.265185 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fv6zj" event={"ID":"2d2049d7-de64-4070-959f-8cefd1f15e5d","Type":"ContainerStarted","Data":"92d830dfff9eb1d0c8ef55ced8000b4476e1e85f3c778db530f31e18a7146e25"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.266564 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" event={"ID":"c1ec79a4-45cc-43dd-883e-c6623922eff0","Type":"ContainerStarted","Data":"04dad49586897ce69d2470ac82b04ad2895abadbde3e054270154e998b768392"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.294859 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" event={"ID":"412763c1-3872-4843-9454-988bfa904c52","Type":"ContainerStarted","Data":"764a91542b86319574ca64c739afca9bb9ddaaf9fd2786e9128417a2408e44be"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.301275 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" event={"ID":"cec358c7-a361-4654-9800-6e275a41c909","Type":"ContainerStarted","Data":"44938376db948fde759b9cf49e62e25d84d9694d17f163bdc98efb2f22636386"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.311312 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" event={"ID":"cc4688ee-1a54-48c3-b328-855143ddfe38","Type":"ContainerStarted","Data":"2e87cd52c376379d0048ee2088ccffc9c186044745dc02adc5b0c3d91d8b381c"}
Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.312369 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.812326242 +0000 UTC m=+139.538991559 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.313615 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.314100 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.314516 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.814499333 +0000 UTC m=+139.541164649 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.332394 4765 generic.go:334] "Generic (PLEG): container finished" podID="6d03cd7a-f608-45b5-901c-b01678e4b69a" containerID="82882534fa43c32a84585f92ac2b0947ee937c11715d96d673712863ee36441d" exitCode=0
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.332702 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" event={"ID":"6d03cd7a-f608-45b5-901c-b01678e4b69a","Type":"ContainerDied","Data":"82882534fa43c32a84585f92ac2b0947ee937c11715d96d673712863ee36441d"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.339265 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" event={"ID":"6256c135-6830-47c9-858c-ad896f3cdee8","Type":"ContainerStarted","Data":"39ee97dd9ffad8e7498dd52d7078d81f1c8d13d43a7bbe976751a8275c14abfb"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.343670 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-tcrlf" event={"ID":"bfc4f256-0f84-4e7e-8e29-8e196911ee59","Type":"ContainerStarted","Data":"16183dc87f1c44d6bb0cac5eaf953dcc70bf865ce4a9bc8d2fdbc80a4b595de4"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.349386 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" event={"ID":"301fe92b-b1fa-46ce-a9f1-97c23550e4b9","Type":"ContainerStarted","Data":"463fc6a64c6e68a989b390728ffcf0fd73bfd9d7c63698227ab8e8a47f621945"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.352884 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" event={"ID":"9c657c6a-60d0-4e3f-b70e-c602bb01bf7c","Type":"ContainerStarted","Data":"b53abaeca4375ad944411fab685970dba59789e54850a083404d91254844df9d"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.363237 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" event={"ID":"40915631-1b59-4783-8633-88ea61ed4814","Type":"ContainerStarted","Data":"5f82b45fb7759f499a216b65ce07da7fac4a3056754681e6f9862979272334e8"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.364810 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-lxxm6" event={"ID":"7b597651-9700-4c4b-9d2d-c21dc37c1959","Type":"ContainerStarted","Data":"e4d582dc88a3e6f3304a248be6f4fdb8da43c8e29359d0794099554d7e13a50e"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.412030 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-svfhs" event={"ID":"3ce9501a-7d19-42bf-94fc-b63427ef3c12","Type":"ContainerStarted","Data":"3842319eff57e8e36169d5c4793f1f5d81376a8f32479e9c660ba43a6da0a754"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.416179 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.417394 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:19.917362957 +0000 UTC m=+139.644028273 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.463867 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" event={"ID":"82306cc4-ec57-498a-8481-5832db533206","Type":"ContainerStarted","Data":"a88703de7fcf30b35e2e1db0850cd066897b764a485bf40c54ff7032a36a3341"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.465643 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" event={"ID":"1e22fddb-6aeb-415e-bea6-b591462f42a8","Type":"ContainerStarted","Data":"397dca2c311e35e6a74ed38d6465d3af280f6f498d2feaa09e6c8d0bea93a038"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.471131 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" event={"ID":"468222f3-9fe0-4909-b72b-ec659ca3908e","Type":"ContainerStarted","Data":"d0bfc191ff56c876431f0aed0da2504f6ace70387bc34a89f9c9b9ab82038dbb"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.478948 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx"]
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.480794 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" event={"ID":"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef","Type":"ContainerStarted","Data":"fbd2ebd72b636e5962f9f1794467373519e4a481f4233dc0bc131de9ac7432ca"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.485721 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" event={"ID":"1a729b53-a69c-4fe8-a888-f61aa0856910","Type":"ContainerStarted","Data":"81de44ce1c58d6c9a6efe5a8375df419c0bae11ba971a4e0caeb301b44633ac0"}
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.486383 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm"]
Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.499580 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh"
event={"ID":"f0f8b6b6-1a80-4835-b50e-26dc93c985a0","Type":"ContainerStarted","Data":"6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90"} Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.499639 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.499656 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" event={"ID":"f0f8b6b6-1a80-4835-b50e-26dc93c985a0","Type":"ContainerStarted","Data":"28e96d85fce80b0b262c45aa6f4bed3efe02f9bbc2e2632cd3023d70996ca6b6"} Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.517987 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.519487 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.019453949 +0000 UTC m=+139.746119255 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.527822 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.620354 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.620785 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.120762488 +0000 UTC m=+139.847427814 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.621010 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.622295 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.12226193 +0000 UTC m=+139.848927446 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.722526 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.722673 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.222650605 +0000 UTC m=+139.949315921 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.723238 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.723612 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.223601961 +0000 UTC m=+139.950267277 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.781254 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" podStartSLOduration=118.781234207 podStartE2EDuration="1m58.781234207s" podCreationTimestamp="2025-12-10 06:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:19.77957256 +0000 UTC m=+139.506237896" watchObservedRunningTime="2025-12-10 06:50:19.781234207 +0000 UTC m=+139.507899513" Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.824573 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.824954 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.324932752 +0000 UTC m=+140.051598078 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.825523 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-svfhs" podStartSLOduration=119.825505178 podStartE2EDuration="1m59.825505178s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:19.824677955 +0000 UTC m=+139.551343281" watchObservedRunningTime="2025-12-10 06:50:19.825505178 +0000 UTC m=+139.552170494" Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.848689 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.862470 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:19 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:19 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:19 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.862541 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.927585 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:19 crc kubenswrapper[4765]: E1210 06:50:19.927934 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.427920619 +0000 UTC m=+140.154585935 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.944961 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" podStartSLOduration=119.944936526 podStartE2EDuration="1m59.944936526s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:19.94223136 +0000 UTC m=+139.668896676" watchObservedRunningTime="2025-12-10 06:50:19.944936526 +0000 UTC m=+139.671601842" Dec 10 06:50:19 crc kubenswrapper[4765]: I1210 06:50:19.982810 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" podStartSLOduration=119.982788777 podStartE2EDuration="1m59.982788777s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:19.971263714 +0000 UTC m=+139.697929020" watchObservedRunningTime="2025-12-10 06:50:19.982788777 +0000 UTC m=+139.709454093" Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.013862 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.044629 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-4xcxz"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.045721 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:20 crc kubenswrapper[4765]: E1210 06:50:20.046348 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.546328898 +0000 UTC m=+140.272994214 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.073653 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ns797"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.085399 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.155225 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:20 crc kubenswrapper[4765]: E1210 06:50:20.155576 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.65556447 +0000 UTC m=+140.382229786 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.256400 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:20 crc kubenswrapper[4765]: E1210 06:50:20.256872 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.756852759 +0000 UTC m=+140.483518075 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:20 crc kubenswrapper[4765]: W1210 06:50:20.256956 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09f3f071_b7af_4e7f_b459_b5316393be20.slice/crio-0881b3759233c022431aa93d7855233acd7e911412a96387b90927bea8e876e4 WatchSource:0}: Error finding container 0881b3759233c022431aa93d7855233acd7e911412a96387b90927bea8e876e4: Status 404 returned error can't find the container with id 0881b3759233c022431aa93d7855233acd7e911412a96387b90927bea8e876e4 Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.314675 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.362926 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:20 crc kubenswrapper[4765]: E1210 06:50:20.363282 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.863270172 +0000 UTC m=+140.589935488 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.394063 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-tzv7k"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.404316 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.407900 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7p95x"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.424884 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6shb9"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.464039 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:20 crc kubenswrapper[4765]: E1210 06:50:20.464236 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.964212011 +0000 UTC m=+140.690877327 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.464793 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:20 crc kubenswrapper[4765]: E1210 06:50:20.465158 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:20.965143207 +0000 UTC m=+140.691808523 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.472160 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm"] Dec 10 06:50:20 crc kubenswrapper[4765]: W1210 06:50:20.480248 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd488de1f_84dd_45c5_a21f_16fd53d4e55a.slice/crio-27ef4aec1bf9bf3b59ff983e42e5287f14887d84588af2ef1ebeae9b4bd33a9c WatchSource:0}: Error finding container 27ef4aec1bf9bf3b59ff983e42e5287f14887d84588af2ef1ebeae9b4bd33a9c: Status 404 returned error can't find the container with id 27ef4aec1bf9bf3b59ff983e42e5287f14887d84588af2ef1ebeae9b4bd33a9c Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.498751 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.503036 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ns797" event={"ID":"2b45c80a-66df-4cda-9b80-33101f1f8060","Type":"ContainerStarted","Data":"2f8f2272cdc243043c038214e8841bbee638f3f052fc62f926c77dfafc16ba9c"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.515352 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" event={"ID":"8c1203d5-c663-4f5a-96b5-ca6b398114bd","Type":"ContainerStarted","Data":"cbd93fc5415aa7a45ff2f610a85fb4e28a78677d2715eeab74c6c083f7676a2f"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.537977 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" event={"ID":"25285c06-58d0-4f7c-a04c-ff944a2c7add","Type":"ContainerStarted","Data":"f3ab8f43dc280919e59610eca921acef8ea6932df82a91f028f5d5fdbbf03f4c"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.543190 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" event={"ID":"c1ec79a4-45cc-43dd-883e-c6623922eff0","Type":"ContainerStarted","Data":"6449308147d2226dc6cea12df5ea617f9e2140c8ffc8b9334216f6ac7b11211b"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.547192 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.547981 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" event={"ID":"412763c1-3872-4843-9454-988bfa904c52","Type":"ContainerStarted","Data":"a154444c1b49b1f9c32e4f178e2b985f84f1a343ab3555d791ea4cbd501ad690"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.548756 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 
06:50:20.552979 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" event={"ID":"34ca3d50-d8f9-420b-941c-3d46bd866149","Type":"ContainerStarted","Data":"1d047ca7302468a2b2e7bb30ee76bf27bc35ce4ad75785228d8ac4b95239a349"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.565780 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:20 crc kubenswrapper[4765]: E1210 06:50:20.566337 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:21.066315893 +0000 UTC m=+140.792981209 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.590478 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.590853 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-4xcxz" event={"ID":"09f3f071-b7af-4e7f-b459-b5316393be20","Type":"ContainerStarted","Data":"0881b3759233c022431aa93d7855233acd7e911412a96387b90927bea8e876e4"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.612199 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" event={"ID":"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7","Type":"ContainerStarted","Data":"28f509d989b60b77084bc48778ff75153f9cd82ed5482d1d140c018273a8b6d8"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.626977 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" event={"ID":"d488de1f-84dd-45c5-a21f-16fd53d4e55a","Type":"ContainerStarted","Data":"27ef4aec1bf9bf3b59ff983e42e5287f14887d84588af2ef1ebeae9b4bd33a9c"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.641144 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-8slv4" event={"ID":"cc4688ee-1a54-48c3-b328-855143ddfe38","Type":"ContainerStarted","Data":"2459ce5cf250d837ecce197cfbdd51859798aca01c9719824ad98ad169ba6cac"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.653996 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-7m9zf" event={"ID":"47211fde-bde0-457f-8336-c46af9d7ee00","Type":"ContainerStarted","Data":"4f2e8a4303e08984d8cf69eedfe941510428a7e4fb3ad34669ab84ffc221bab4"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.654586 4765 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.671687 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:20 crc kubenswrapper[4765]: E1210 06:50:20.673166 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:21.173150328 +0000 UTC m=+140.899815644 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:20 crc kubenswrapper[4765]: W1210 06:50:20.686234 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91562083_eeea_41be_8455_e5fb3ff57453.slice/crio-3d45b8f6cd04c1250f44290b90c9b9940e877e8cd241f0d0fb11707cfff68736 WatchSource:0}: Error finding container 3d45b8f6cd04c1250f44290b90c9b9940e877e8cd241f0d0fb11707cfff68736: Status 404 returned error can't find the container with id 3d45b8f6cd04c1250f44290b90c9b9940e877e8cd241f0d0fb11707cfff68736 Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.687232 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" event={"ID":"fa5d5cf0-2956-4bb1-9372-0f858ff17342","Type":"ContainerStarted","Data":"feb8e07992036ad2eaee55c4591f22304489dd625c0b8ed43eae176238d3e125"} Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.699975 4765 generic.go:334] "Generic (PLEG): container finished" podID="82306cc4-ec57-498a-8481-5832db533206" containerID="887322c10d2b6164ff641f2f9f86a6a8be71aeeb48ff4f85c7214cdd343b2087" exitCode=0 Dec 10 06:50:20 crc kubenswrapper[4765]: W1210 06:50:20.700485 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc444ed13_88e9_41b6_a7d6_c1fa51cb7e01.slice/crio-2e1c516b8c189971fcae4df7c5359edd90c4d77d14564a8cb4919b0597cd18dc WatchSource:0}: Error finding container 2e1c516b8c189971fcae4df7c5359edd90c4d77d14564a8cb4919b0597cd18dc: Status 404 returned error can't find the container with id 2e1c516b8c189971fcae4df7c5359edd90c4d77d14564a8cb4919b0597cd18dc Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.700934 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" event={"ID":"82306cc4-ec57-498a-8481-5832db533206","Type":"ContainerDied","Data":"887322c10d2b6164ff641f2f9f86a6a8be71aeeb48ff4f85c7214cdd343b2087"} Dec 10 06:50:20 crc kubenswrapper[4765]: W1210 06:50:20.721354 4765 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod206f5b8b_75ee_44ae_bae5_dfd6bc829b9b.slice/crio-032f6cba9b4b16f041a758ffb32aa39d7834d639ac8d7f5716515b8e74c9b5d1 WatchSource:0}: Error finding container 032f6cba9b4b16f041a758ffb32aa39d7834d639ac8d7f5716515b8e74c9b5d1: Status 404 returned error can't find the container with id 032f6cba9b4b16f041a758ffb32aa39d7834d639ac8d7f5716515b8e74c9b5d1 Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.722943 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gzldr" podStartSLOduration=120.722924803 podStartE2EDuration="2m0.722924803s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:20.721834722 +0000 UTC m=+140.448500028" watchObservedRunningTime="2025-12-10 06:50:20.722924803 +0000 UTC m=+140.449590119" Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.723894 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-cq24j" podStartSLOduration=120.72388699 podStartE2EDuration="2m0.72388699s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:20.626560902 +0000 UTC m=+140.353226228" watchObservedRunningTime="2025-12-10 06:50:20.72388699 +0000 UTC m=+140.450552306" Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.739379 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bkcmk"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.752101 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.758358 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.774287 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq"] Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.778140 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:20 crc kubenswrapper[4765]: E1210 06:50:20.821042 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:21.321008542 +0000 UTC m=+141.047673858 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.856736 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-7m9zf" Dec 10 06:50:20 crc kubenswrapper[4765]: W1210 06:50:20.888960 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf9a4b55_b2dd_497c_aee6_c2fa241d5b33.slice/crio-79950f88858e2e509a33e9c3f64a30eaff28415ad629877ff8cbbb016f381d95 WatchSource:0}: Error finding container 79950f88858e2e509a33e9c3f64a30eaff28415ad629877ff8cbbb016f381d95: Status 404 returned error can't find the container with id 79950f88858e2e509a33e9c3f64a30eaff28415ad629877ff8cbbb016f381d95 Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.889479 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:20 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:20 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:20 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.889527 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.916348 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-7m9zf" podStartSLOduration=120.916330234 podStartE2EDuration="2m0.916330234s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:20.796421603 +0000 UTC m=+140.523086919" watchObservedRunningTime="2025-12-10 06:50:20.916330234 +0000 UTC m=+140.642995540" Dec 10 06:50:20 crc kubenswrapper[4765]: I1210 06:50:20.930679 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:20 crc kubenswrapper[4765]: E1210 06:50:20.931355 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:21.431338965 +0000 UTC m=+141.158004281 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.032308 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.033175 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:21.533155069 +0000 UTC m=+141.259820385 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.141056 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.141718 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:21.641702332 +0000 UTC m=+141.368367648 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.245846 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.246497 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:21.746479419 +0000 UTC m=+141.473144735 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.351383 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.352209 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:21.852196852 +0000 UTC m=+141.578862168 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.452638 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.453595 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:21.953569644 +0000 UTC m=+141.680234960 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.454135 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.454719 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:21.954707046 +0000 UTC m=+141.681372362 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.552062 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dzrqt"] Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.557719 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.557787 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:22.057769935 +0000 UTC m=+141.784435251 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.562594 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.562764 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.563018 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:22.063006512 +0000 UTC m=+141.789671828 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.567830 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.577389 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dzrqt"] Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.664639 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.665004 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:22.16498913 +0000 UTC m=+141.891654446 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.749533 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l9jw8"] Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.750496 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.753003 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" event={"ID":"8c1203d5-c663-4f5a-96b5-ca6b398114bd","Type":"ContainerStarted","Data":"546d3817677fb95569f83bfe438b0c93ce604de8a1e560900dcb4e92b52ef1a0"} Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.770568 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.770633 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8xss\" (UniqueName: \"kubernetes.io/projected/c073bf0a-0806-4af1-9902-a0fe221901be-kube-api-access-n8xss\") pod \"community-operators-dzrqt\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.770679 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-catalog-content\") pod \"community-operators-dzrqt\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.770703 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-utilities\") pod \"community-operators-dzrqt\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.770574 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.771042 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:22.271023503 +0000 UTC m=+141.997688819 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.788391 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" event={"ID":"2c029930-d67e-4812-a6e1-bb8d9b806655","Type":"ContainerStarted","Data":"ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a"} Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.789609 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.802614 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l9jw8"] Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.842109 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" event={"ID":"cec358c7-a361-4654-9800-6e275a41c909","Type":"ContainerStarted","Data":"72fc9fce3b7d3d7ee32d341aebcb9ebf99e330c068c16c3d4b102b80b225965c"} Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.851916 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:21 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:21 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:21 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.851968 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.875260 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.875516 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-utilities\") pod \"certified-operators-l9jw8\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.875555 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-catalog-content\") pod \"community-operators-dzrqt\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:21 crc 
kubenswrapper[4765]: I1210 06:50:21.875571 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-utilities\") pod \"community-operators-dzrqt\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.875592 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-catalog-content\") pod \"certified-operators-l9jw8\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.875643 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvljv\" (UniqueName: \"kubernetes.io/projected/a7909736-a9e6-4791-942b-31997fe6d3ee-kube-api-access-gvljv\") pod \"certified-operators-l9jw8\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.875691 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8xss\" (UniqueName: \"kubernetes.io/projected/c073bf0a-0806-4af1-9902-a0fe221901be-kube-api-access-n8xss\") pod \"community-operators-dzrqt\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:21 crc kubenswrapper[4765]: E1210 06:50:21.876072 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:22.376057377 +0000 UTC m=+142.102722693 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.876504 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-catalog-content\") pod \"community-operators-dzrqt\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.876678 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-utilities\") pod \"community-operators-dzrqt\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.898397 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-4xcxz" event={"ID":"09f3f071-b7af-4e7f-b459-b5316393be20","Type":"ContainerStarted","Data":"b0b9c96bd742ab6d8aea49f01c85f1181e7d260757539670be5542d72ee7ff0a"} Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.969252 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8xss\" (UniqueName: \"kubernetes.io/projected/c073bf0a-0806-4af1-9902-a0fe221901be-kube-api-access-n8xss\") pod \"community-operators-dzrqt\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:21 crc kubenswrapper[4765]: I1210 06:50:21.978824 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" event={"ID":"9c657c6a-60d0-4e3f-b70e-c602bb01bf7c","Type":"ContainerStarted","Data":"e21a0115477ff41ffde548ab98668065c973103eca447d7e0871fe7e174a3892"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.020672 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-utilities\") pod \"certified-operators-l9jw8\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.031059 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-catalog-content\") pod \"certified-operators-l9jw8\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.031361 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvljv\" (UniqueName: \"kubernetes.io/projected/a7909736-a9e6-4791-942b-31997fe6d3ee-kube-api-access-gvljv\") pod \"certified-operators-l9jw8\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.031502 4765 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.029866 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-utilities\") pod \"certified-operators-l9jw8\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.033995 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:22.533955473 +0000 UTC m=+142.260620789 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.034472 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-catalog-content\") pod \"certified-operators-l9jw8\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.072582 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" event={"ID":"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01","Type":"ContainerStarted","Data":"2e1c516b8c189971fcae4df7c5359edd90c4d77d14564a8cb4919b0597cd18dc"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.072857 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q9xg4"] Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.081480 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.107747 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" podStartSLOduration=122.107732981 podStartE2EDuration="2m2.107732981s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.106815165 +0000 UTC m=+141.833480511" watchObservedRunningTime="2025-12-10 06:50:22.107732981 +0000 UTC m=+141.834398297" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.113100 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fv6zj" event={"ID":"2d2049d7-de64-4070-959f-8cefd1f15e5d","Type":"ContainerStarted","Data":"2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.121698 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.122481 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q9xg4"] Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.124457 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvljv\" (UniqueName: \"kubernetes.io/projected/a7909736-a9e6-4791-942b-31997fe6d3ee-kube-api-access-gvljv\") pod \"certified-operators-l9jw8\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.125542 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" event={"ID":"6d03cd7a-f608-45b5-901c-b01678e4b69a","Type":"ContainerStarted","Data":"97220627fed0d6f2e0c0d186c95d7324d475cab5ca788cc51435be8f86bd57ce"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.128792 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" event={"ID":"6256c135-6830-47c9-858c-ad896f3cdee8","Type":"ContainerStarted","Data":"98b7cb978c2694b93258fc37e5a53a76ccccf2a11c90d7f34d355efbcbfb8546"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.132111 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" event={"ID":"25285c06-58d0-4f7c-a04c-ff944a2c7add","Type":"ContainerStarted","Data":"80b95a08eeb1ebb80b979c2dcc0698d12f436c270dcbfe423186e58727ea1322"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.132493 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.132639 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-catalog-content\") pod \"community-operators-q9xg4\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " 
pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.132671 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7spmh\" (UniqueName: \"kubernetes.io/projected/2f0fa6f1-2039-4003-88b8-d0fccce70b29-kube-api-access-7spmh\") pod \"community-operators-q9xg4\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.132735 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-utilities\") pod \"community-operators-q9xg4\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.132903 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:22.632887276 +0000 UTC m=+142.359552592 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.136483 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" event={"ID":"7ffe8020-73ec-4a69-b53f-7be8548df67b","Type":"ContainerStarted","Data":"95b4bb17729b7043ded2ce637151da0f885e3c963d63f3d53e9163a44f40723d"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.172711 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k44ps"] Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.181939 4765 generic.go:334] "Generic (PLEG): container finished" podID="1e22fddb-6aeb-415e-bea6-b591462f42a8" containerID="e6f99bec3190b824231ebd2339280b003ed5890df6a34204f3495284d3cda8df" exitCode=0 Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.183788 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" event={"ID":"1e22fddb-6aeb-415e-bea6-b591462f42a8","Type":"ContainerDied","Data":"e6f99bec3190b824231ebd2339280b003ed5890df6a34204f3495284d3cda8df"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.183886 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.199869 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.213287 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" event={"ID":"301fe92b-b1fa-46ce-a9f1-97c23550e4b9","Type":"ContainerStarted","Data":"ba2e9a2a7992fbfd44b34a5b7ed136b892c9b33247ec6b361ef88557e35a50c3"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.234287 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7spmh\" (UniqueName: \"kubernetes.io/projected/2f0fa6f1-2039-4003-88b8-d0fccce70b29-kube-api-access-7spmh\") pod \"community-operators-q9xg4\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.234376 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-catalog-content\") pod \"certified-operators-k44ps\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.234513 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-utilities\") pod \"community-operators-q9xg4\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.234546 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-utilities\") pod \"certified-operators-k44ps\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.234588 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.234781 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rvxf\" (UniqueName: \"kubernetes.io/projected/7b4761b3-3632-4441-897d-4ab2635b7630-kube-api-access-6rvxf\") pod \"certified-operators-k44ps\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.234811 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-catalog-content\") pod \"community-operators-q9xg4\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.235322 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-catalog-content\") pod \"community-operators-q9xg4\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.242003 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-utilities\") pod \"community-operators-q9xg4\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.252022 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7" event={"ID":"9195e6d4-dee4-4450-9909-aa0d120dc162","Type":"ContainerStarted","Data":"903b5e4ddc475f40a43042d40b38222826552b34155457acd1bba86c78049bf9"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.258268 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k44ps"] Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.277043 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:22.777021427 +0000 UTC m=+142.503686743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.293643 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44knz" podStartSLOduration=124.293602841 podStartE2EDuration="2m4.293602841s" podCreationTimestamp="2025-12-10 06:48:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.287454089 +0000 UTC m=+142.014119405" watchObservedRunningTime="2025-12-10 06:50:22.293602841 +0000 UTC m=+142.020268177" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.307615 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7spmh\" (UniqueName: \"kubernetes.io/projected/2f0fa6f1-2039-4003-88b8-d0fccce70b29-kube-api-access-7spmh\") pod \"community-operators-q9xg4\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.313822 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" event={"ID":"468222f3-9fe0-4909-b72b-ec659ca3908e","Type":"ContainerStarted","Data":"1e718ae40c83734fefe9647f284290284bb7a42cbb9cf1b7cfb3dd145c8fdf5a"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.313865 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" 
event={"ID":"468222f3-9fe0-4909-b72b-ec659ca3908e","Type":"ContainerStarted","Data":"afaf238d9f547a14ecc8d64712cda33f7a9fcfb64bd989c37336746a1819af43"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.338365 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.338661 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rvxf\" (UniqueName: \"kubernetes.io/projected/7b4761b3-3632-4441-897d-4ab2635b7630-kube-api-access-6rvxf\") pod \"certified-operators-k44ps\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.338738 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-catalog-content\") pod \"certified-operators-k44ps\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.338863 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-utilities\") pod \"certified-operators-k44ps\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.341211 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-utilities\") pod \"certified-operators-k44ps\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.341637 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:22.841617397 +0000 UTC m=+142.568282723 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.346858 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-lxxm6" event={"ID":"7b597651-9700-4c4b-9d2d-c21dc37c1959","Type":"ContainerStarted","Data":"b16875712e22ccc4bf392f1d759525dc751f967a92dfad60428f17125a56aabb"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.347941 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-lxxm6" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.350983 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-catalog-content\") pod \"certified-operators-k44ps\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.359294 4765 patch_prober.go:28] interesting pod/downloads-7954f5f757-lxxm6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body= Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.359382 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lxxm6" podUID="7b597651-9700-4c4b-9d2d-c21dc37c1959" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.369559 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-fv6zj" podStartSLOduration=122.36953853 podStartE2EDuration="2m2.36953853s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.3342137 +0000 UTC m=+142.060879026" watchObservedRunningTime="2025-12-10 06:50:22.36953853 +0000 UTC m=+142.096203846" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.386811 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" event={"ID":"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b","Type":"ContainerStarted","Data":"032f6cba9b4b16f041a758ffb32aa39d7834d639ac8d7f5716515b8e74c9b5d1"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.388635 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" event={"ID":"1a729b53-a69c-4fe8-a888-f61aa0856910","Type":"ContainerStarted","Data":"3bd6fd27e4e22c347d71b175f0356648060a6d417c9db556de14d0e76553b666"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.393017 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rvxf\" (UniqueName: 
\"kubernetes.io/projected/7b4761b3-3632-4441-897d-4ab2635b7630-kube-api-access-6rvxf\") pod \"certified-operators-k44ps\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.429854 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-4xcxz" podStartSLOduration=7.42983562 podStartE2EDuration="7.42983562s" podCreationTimestamp="2025-12-10 06:50:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.386749142 +0000 UTC m=+142.113414458" watchObservedRunningTime="2025-12-10 06:50:22.42983562 +0000 UTC m=+142.156500936" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.430386 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" event={"ID":"25273950-0e4d-409a-8387-b571c2b15a05","Type":"ContainerStarted","Data":"15a255451157961618a437364953652a675b1679a4acbea7c3eeab66dba5c31b"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.441611 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.445984 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:22.945964462 +0000 UTC m=+142.672629978 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.449893 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nrlwx" podStartSLOduration=122.449868702 podStartE2EDuration="2m2.449868702s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.445452348 +0000 UTC m=+142.172117664" watchObservedRunningTime="2025-12-10 06:50:22.449868702 +0000 UTC m=+142.176534018" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.455886 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" event={"ID":"e33fd3b0-8406-4675-91fc-ed8b1b3e7cef","Type":"ContainerStarted","Data":"cfc1c2704f5546e9cc1990192eaffb0c8f1e2c0569c818d008d7965d67cc9513"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.463624 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.481393 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" podStartSLOduration=121.481370645 podStartE2EDuration="2m1.481370645s" podCreationTimestamp="2025-12-10 06:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.474499642 +0000 UTC m=+142.201164968" watchObservedRunningTime="2025-12-10 06:50:22.481370645 +0000 UTC m=+142.208035961" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.504718 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-tcrlf" event={"ID":"bfc4f256-0f84-4e7e-8e29-8e196911ee59","Type":"ContainerStarted","Data":"31536903c30508610bdf92e3bbd5af5a9e4bdd548cb9be8826e26e70cbe3784a"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.519050 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" event={"ID":"34ca3d50-d8f9-420b-941c-3d46bd866149","Type":"ContainerStarted","Data":"371633e3eabd1dd0244d3bc6e58f801b0b1297bbdc739bd806bb37a4eb344bc7"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.520426 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.545352 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.547625 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.047601861 +0000 UTC m=+142.774267177 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.548672 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" event={"ID":"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76","Type":"ContainerStarted","Data":"16badef4290b83ab838080287dd0114a85ddf260647c96f2d33d29c603862e91"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.564349 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.582294 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-lxxm6" podStartSLOduration=122.582273493 podStartE2EDuration="2m2.582273493s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.580112873 +0000 UTC m=+142.306778209" watchObservedRunningTime="2025-12-10 06:50:22.582273493 +0000 UTC m=+142.308938829" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.681747 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.682212 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.182196694 +0000 UTC m=+142.908862010 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.683582 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wpqgk" podStartSLOduration=122.683561342 podStartE2EDuration="2m2.683561342s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.627855871 +0000 UTC m=+142.354521187" watchObservedRunningTime="2025-12-10 06:50:22.683561342 +0000 UTC m=+142.410226678" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.684508 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" event={"ID":"3719873e-72ba-4b39-a8d5-8f4b3196a96d","Type":"ContainerStarted","Data":"dad3d8412482269a044201d7303247d547c66200e021b9a34a76125b660fafff"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.684603 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.684621 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h" event={"ID":"bf9a4b55-b2dd-497c-aee6-c2fa241d5b33","Type":"ContainerStarted","Data":"79950f88858e2e509a33e9c3f64a30eaff28415ad629877ff8cbbb016f381d95"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.723589 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" event={"ID":"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7","Type":"ContainerStarted","Data":"35dc360ea5d10695a2200879051e86ce4d09c4800255eec844408592c7a0b5f6"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.783805 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.783962 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.283939496 +0000 UTC m=+143.010604812 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.784208 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.784533 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.284520023 +0000 UTC m=+143.011185339 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.795728 4765 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-ljp6b container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.14:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.795799 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.14:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.822702 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p4zdq" podStartSLOduration=122.822681382 podStartE2EDuration="2m2.822681382s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.795437409 +0000 UTC m=+142.522102725" watchObservedRunningTime="2025-12-10 06:50:22.822681382 +0000 UTC m=+142.549346698" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.832594 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" event={"ID":"fa5d5cf0-2956-4bb1-9372-0f858ff17342","Type":"ContainerStarted","Data":"a28f2fbafc1ec81d8df15b899b00338defa3922db93df5b60073d4e38c2c7bf8"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.833638 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.834050 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h" podStartSLOduration=122.834037911 podStartE2EDuration="2m2.834037911s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.832888298 +0000 UTC m=+142.559553624" watchObservedRunningTime="2025-12-10 06:50:22.834037911 +0000 UTC m=+142.560703227" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.882383 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7p95x" event={"ID":"91562083-eeea-41be-8455-e5fb3ff57453","Type":"ContainerStarted","Data":"3d45b8f6cd04c1250f44290b90c9b9940e877e8cd241f0d0fb11707cfff68736"} Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.882656 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs 
container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:22 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:22 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:22 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.882701 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.884679 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-tcrlf" podStartSLOduration=7.88466123 podStartE2EDuration="7.88466123s" podCreationTimestamp="2025-12-10 06:50:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.883060155 +0000 UTC m=+142.609725471" watchObservedRunningTime="2025-12-10 06:50:22.88466123 +0000 UTC m=+142.611326546" Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.884940 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.886041 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.386023068 +0000 UTC m=+143.112688384 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.909427 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j" event={"ID":"22e4de02-d14f-4a69-8a40-87380cd3ed44","Type":"ContainerStarted","Data":"75420cef74d139ef7df21e9121944630fb1faac292919372345b0d0dbfd8bae3"}
Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.943051 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nrl2f" podStartSLOduration=122.943031036 podStartE2EDuration="2m2.943031036s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.94246811 +0000 UTC m=+142.669133446" watchObservedRunningTime="2025-12-10 06:50:22.943031036 +0000 UTC m=+142.669696342"
Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.991562 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:22 crc kubenswrapper[4765]: E1210 06:50:22.992037 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.492022289 +0000 UTC m=+143.218687605 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.992642 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ns797" event={"ID":"2b45c80a-66df-4cda-9b80-33101f1f8060","Type":"ContainerStarted","Data":"680a3bd3f421bbe820555bc3dd4e689a1592808285a83a2553ffb9c0faa5c443"}
Dec 10 06:50:22 crc kubenswrapper[4765]: I1210 06:50:22.998521 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-9rf4c" podStartSLOduration=122.99846487 podStartE2EDuration="2m2.99846487s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:22.987372219 +0000 UTC m=+142.714037535" watchObservedRunningTime="2025-12-10 06:50:22.99846487 +0000 UTC m=+142.725130186"
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.035307 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" event={"ID":"40915631-1b59-4783-8633-88ea61ed4814","Type":"ContainerStarted","Data":"39ba264acab86e7de92f375a9b8378ebce7102017e7da1a7ff703ad69945b930"}
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.075128 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm" podStartSLOduration=122.075110858 podStartE2EDuration="2m2.075110858s" podCreationTimestamp="2025-12-10 06:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:23.074669046 +0000 UTC m=+142.801334362" watchObservedRunningTime="2025-12-10 06:50:23.075110858 +0000 UTC m=+142.801776174"
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.079698 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ln8tm"
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.094441 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:23 crc kubenswrapper[4765]: E1210 06:50:23.095224 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.595200311 +0000 UTC m=+143.321865627 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.096017 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:23 crc kubenswrapper[4765]: E1210 06:50:23.101727 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.601711004 +0000 UTC m=+143.328376320 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.115934 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rz6tg" podStartSLOduration=123.115913072 podStartE2EDuration="2m3.115913072s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:23.11297808 +0000 UTC m=+142.839643406" watchObservedRunningTime="2025-12-10 06:50:23.115913072 +0000 UTC m=+142.842578388"
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.186397 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dzrqt"]
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.204086 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:23 crc kubenswrapper[4765]: E1210 06:50:23.204651 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.704632329 +0000 UTC m=+143.431297645 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.316856 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:23 crc kubenswrapper[4765]: E1210 06:50:23.317803 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.817785031 +0000 UTC m=+143.544450347 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.418036 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:23 crc kubenswrapper[4765]: E1210 06:50:23.418626 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:23.918605697 +0000 UTC m=+143.645271013 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.458849 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q9xg4"]
Dec 10 06:50:23 crc kubenswrapper[4765]: W1210 06:50:23.481213 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f0fa6f1_2039_4003_88b8_d0fccce70b29.slice/crio-4b8cd164b032fa5650b2137f665f26db16f803f003a42db8c3098e5e9c365a7f WatchSource:0}: Error finding container 4b8cd164b032fa5650b2137f665f26db16f803f003a42db8c3098e5e9c365a7f: Status 404 returned error can't find the container with id 4b8cd164b032fa5650b2137f665f26db16f803f003a42db8c3098e5e9c365a7f
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.523846 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:23 crc kubenswrapper[4765]: E1210 06:50:23.524293 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:24.024275669 +0000 UTC m=+143.750940985 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.591262 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r"
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.624952 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:23 crc kubenswrapper[4765]: E1210 06:50:23.626228 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:24.126195006 +0000 UTC m=+143.852860352 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.739503 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.740783 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k44ps"]
Dec 10 06:50:23 crc kubenswrapper[4765]: E1210 06:50:23.745572 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:24.245551812 +0000 UTC m=+143.972217128 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.803068 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l9jw8"]
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.842746 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:23 crc kubenswrapper[4765]: E1210 06:50:23.843324 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:24.343304802 +0000 UTC m=+144.069970118 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.855713 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 06:50:23 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld
Dec 10 06:50:23 crc kubenswrapper[4765]: [+]process-running ok
Dec 10 06:50:23 crc kubenswrapper[4765]: healthz check failed
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.855857 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.945947 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.946472 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mklh7"]
Dec 10 06:50:23 crc kubenswrapper[4765]: E1210 06:50:23.946952 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:24.446930917 +0000 UTC m=+144.173596253 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.947777 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.956195 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 10 06:50:23 crc kubenswrapper[4765]: I1210 06:50:23.984227 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mklh7"]
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.047166 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:24 crc kubenswrapper[4765]: E1210 06:50:24.047675 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:24.547653809 +0000 UTC m=+144.274319125 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.061155 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" event={"ID":"3719873e-72ba-4b39-a8d5-8f4b3196a96d","Type":"ContainerStarted","Data":"d871807c6f88bbbf08095648f34a63d5a0eaf3c9b8e0c1f14885ec40a773b6da"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.063672 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j" event={"ID":"22e4de02-d14f-4a69-8a40-87380cd3ed44","Type":"ContainerStarted","Data":"06521544ee6fc81778b908201be70f729b711f56ae4a9fca8dcf6abb9dcd816a"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.077497 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7" event={"ID":"9195e6d4-dee4-4450-9909-aa0d120dc162","Type":"ContainerStarted","Data":"d6ed8fd009c43e0b6eb954c7334808cdaf59ef1bc62011fd9ef9ac2df2b273e4"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.077539 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7" event={"ID":"9195e6d4-dee4-4450-9909-aa0d120dc162","Type":"ContainerStarted","Data":"0583de8ba1a605a32f0a895a76e15642dc10be1413555b08507247c521a504b5"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.088177 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" event={"ID":"6d03cd7a-f608-45b5-901c-b01678e4b69a","Type":"ContainerStarted","Data":"499bd7c3f9151f9e9de5afd6b44566c00b4ecabe91f6d464085ddf3bcf616b5d"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.090234 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wmcvd" podStartSLOduration=123.090218362 podStartE2EDuration="2m3.090218362s" podCreationTimestamp="2025-12-10 06:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.088949537 +0000 UTC m=+143.815614853" watchObservedRunningTime="2025-12-10 06:50:24.090218362 +0000 UTC m=+143.816883678"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.096804 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" event={"ID":"3373bfa8-c7a9-4cba-a8b5-6cd39a33a4f7","Type":"ContainerStarted","Data":"2b12ec3e024a8f252155baf6ace48bf46ecfb1d8df8afe90630554dcc84320c4"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.100892 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" event={"ID":"d488de1f-84dd-45c5-a21f-16fd53d4e55a","Type":"ContainerStarted","Data":"50fa1107691eb5752e5cc81becfb0805fbd8cbc181cb435462c140e44b6b1668"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.118762 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" event={"ID":"7ffe8020-73ec-4a69-b53f-7be8548df67b","Type":"ContainerStarted","Data":"50be5e00cd0d3f284bcb1dc8d9242c7bca53c03e098f1be8ad251fb94cb1337d"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.123692 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" event={"ID":"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76","Type":"ContainerStarted","Data":"23af2783bcb23ff155bea99d928b904a0770871325a29f8248dbef8e59a05875"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.124673 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.128045 4765 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bkcmk container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body=
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.128117 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" podUID="7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.129879 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" podStartSLOduration=124.129863194 podStartE2EDuration="2m4.129863194s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.122514598 +0000 UTC m=+143.849179924" watchObservedRunningTime="2025-12-10 06:50:24.129863194 +0000 UTC m=+143.856528510"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.148970 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-utilities\") pod \"redhat-marketplace-mklh7\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.149035 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-catalog-content\") pod \"redhat-marketplace-mklh7\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.149061 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkgcm\" (UniqueName: \"kubernetes.io/projected/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-kube-api-access-tkgcm\") pod \"redhat-marketplace-mklh7\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.149145 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:24 crc kubenswrapper[4765]: E1210 06:50:24.150208 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:24.650196654 +0000 UTC m=+144.376861970 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.152231 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" event={"ID":"82306cc4-ec57-498a-8481-5832db533206","Type":"ContainerStarted","Data":"a107a10b8b1340e4dae0ce32b67a02caff6be3355ea3834ad964af3d75c5134c"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.152977 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.157682 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d48l7" podStartSLOduration=124.157653253 podStartE2EDuration="2m4.157653253s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.153362232 +0000 UTC m=+143.880027548" watchObservedRunningTime="2025-12-10 06:50:24.157653253 +0000 UTC m=+143.884318569"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.181990 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" event={"ID":"301fe92b-b1fa-46ce-a9f1-97c23550e4b9","Type":"ContainerStarted","Data":"cf55858889e8837ffbc88e7607c5b8eec1d764afff6bdd0c98d90df3b6733779"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.185066 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-qsz8j" podStartSLOduration=124.1850431 podStartE2EDuration="2m4.1850431s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.182934021 +0000 UTC m=+143.909599337" watchObservedRunningTime="2025-12-10 06:50:24.1850431 +0000 UTC m=+143.911708416"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.215278 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-j692h" event={"ID":"bf9a4b55-b2dd-497c-aee6-c2fa241d5b33","Type":"ContainerStarted","Data":"f79d14127439c01dd2daed589f3789828f3d00b28d693b39763d9a8ea303c1cd"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.255627 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.255870 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkgcm\" (UniqueName: \"kubernetes.io/projected/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-kube-api-access-tkgcm\") pod \"redhat-marketplace-mklh7\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.256283 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-utilities\") pod \"redhat-marketplace-mklh7\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.256444 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-catalog-content\") pod \"redhat-marketplace-mklh7\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:24 crc kubenswrapper[4765]: E1210 06:50:24.257381 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:24.757360998 +0000 UTC m=+144.484026314 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.260838 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-utilities\") pod \"redhat-marketplace-mklh7\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.260847 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-catalog-content\") pod \"redhat-marketplace-mklh7\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.269140 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" event={"ID":"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01","Type":"ContainerStarted","Data":"8b3c9917c429f0d653cc4a6fab177b0a187ef6e29fed5e5b430fe485fe946084"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.288331 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9jw8" event={"ID":"a7909736-a9e6-4791-942b-31997fe6d3ee","Type":"ContainerStarted","Data":"1cdf55962c6900ead26b3954f0ceac8318e636f4299ea1fd7ba81692df1594b6"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.294160 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-tzv7k" podStartSLOduration=124.294134228 podStartE2EDuration="2m4.294134228s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.228322874 +0000 UTC m=+143.954988190" watchObservedRunningTime="2025-12-10 06:50:24.294134228 +0000 UTC m=+144.020799544"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.323385 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9xg4" event={"ID":"2f0fa6f1-2039-4003-88b8-d0fccce70b29","Type":"ContainerStarted","Data":"4b8cd164b032fa5650b2137f665f26db16f803f003a42db8c3098e5e9c365a7f"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.324209 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkgcm\" (UniqueName: \"kubernetes.io/projected/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-kube-api-access-tkgcm\") pod \"redhat-marketplace-mklh7\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.328634 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.334197 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" podStartSLOduration=124.334171411 podStartE2EDuration="2m4.334171411s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.288560882 +0000 UTC m=+144.015226198" watchObservedRunningTime="2025-12-10 06:50:24.334171411 +0000 UTC m=+144.060836747"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.335064 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gzgps" podStartSLOduration=124.335058856 podStartE2EDuration="2m4.335058856s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.330432566 +0000 UTC m=+144.057097882" watchObservedRunningTime="2025-12-10 06:50:24.335058856 +0000 UTC m=+144.061724172"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.357914 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:24 crc kubenswrapper[4765]: E1210 06:50:24.359599 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:24.859585663 +0000 UTC m=+144.586250979 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.362482 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k44ps" event={"ID":"7b4761b3-3632-4441-897d-4ab2635b7630","Type":"ContainerStarted","Data":"e6f6af63def80df10d8a0eb3295a61bf274a25f66c2b50e2a08e9e403fecc8db"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.386711 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2n9v8" podStartSLOduration=124.386686433 podStartE2EDuration="2m4.386686433s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.378789931 +0000 UTC m=+144.105455247" watchObservedRunningTime="2025-12-10 06:50:24.386686433 +0000 UTC m=+144.113351749"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.386920 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cqthw"]
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.387869 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.422211 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzrqt" event={"ID":"c073bf0a-0806-4af1-9902-a0fe221901be","Type":"ContainerStarted","Data":"8e8fdb26ab0e1d21588cf1c79f7bc39952df3bbd457a0da5889c84d7fde28480"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.422243 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzrqt" event={"ID":"c073bf0a-0806-4af1-9902-a0fe221901be","Type":"ContainerStarted","Data":"2fecd9b0c29f22b3ea3c4ac932734c41d2ce4f855da1f85d66d2fc37173232c5"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.422503 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cqthw"]
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.446161 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b62zd" podStartSLOduration=124.44614185 podStartE2EDuration="2m4.44614185s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.445922943 +0000 UTC m=+144.172588259" watchObservedRunningTime="2025-12-10 06:50:24.44614185 +0000 UTC m=+144.172807166"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.466686 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ns797" event={"ID":"2b45c80a-66df-4cda-9b80-33101f1f8060","Type":"ContainerStarted","Data":"c4acfd1d884b17fafe3fc913b452dd76021c21cba5fd8f03ef2af9b162dfe217"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.466753 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-ns797"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.483148 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.483704 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6w68\" (UniqueName: \"kubernetes.io/projected/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-kube-api-access-t6w68\") pod \"redhat-marketplace-cqthw\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.483768 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-utilities\") pod \"redhat-marketplace-cqthw\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.484055 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-catalog-content\") pod \"redhat-marketplace-cqthw\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: E1210 06:50:24.484305 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:24.984253948 +0000 UTC m=+144.710919264 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.495055 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" event={"ID":"1e22fddb-6aeb-415e-bea6-b591462f42a8","Type":"ContainerStarted","Data":"e85be69eefa483435bd0a22cb9dd48e7be3ab53996f97aaf10b996ca0501d965"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.500028 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" podStartSLOduration=124.500009229 podStartE2EDuration="2m4.500009229s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.493723393 +0000 UTC m=+144.220388719" watchObservedRunningTime="2025-12-10 06:50:24.500009229 +0000 UTC m=+144.226674545"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.555999 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" event={"ID":"9c657c6a-60d0-4e3f-b70e-c602bb01bf7c","Type":"ContainerStarted","Data":"3cb7deb8bd3a1ff5b771c58399b574cac7519d16a4c02916d804287af72f7783"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.556726 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.575487 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.580079 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" event={"ID":"206f5b8b-75ee-44ae-bae5-dfd6bc829b9b","Type":"ContainerStarted","Data":"f5946105e7b0966e5e9676f4b6ae9b66229731b0223f1b6dd11662698d4df592"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.584702 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" podStartSLOduration=125.583078428 podStartE2EDuration="2m5.583078428s" podCreationTimestamp="2025-12-10 06:48:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.580001872 +0000 UTC m=+144.306667208" watchObservedRunningTime="2025-12-10 06:50:24.583078428 +0000 UTC m=+144.309743744"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.585951 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-catalog-content\") pod \"redhat-marketplace-cqthw\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.586054 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6w68\" (UniqueName: \"kubernetes.io/projected/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-kube-api-access-t6w68\") pod \"redhat-marketplace-cqthw\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.586109 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-utilities\") pod \"redhat-marketplace-cqthw\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.586150 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:24 crc kubenswrapper[4765]: E1210 06:50:24.586484 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:25.086471293 +0000 UTC m=+144.813136609 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.587351 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-utilities\") pod \"redhat-marketplace-cqthw\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.600354 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-catalog-content\") pod \"redhat-marketplace-cqthw\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.636911 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6w68\" (UniqueName: \"kubernetes.io/projected/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-kube-api-access-t6w68\") pod \"redhat-marketplace-cqthw\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.654475 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" event={"ID":"6256c135-6830-47c9-858c-ad896f3cdee8","Type":"ContainerStarted","Data":"3b11ff7a27fa3b78f3a35ddca205d6fdc0b3688d3fb52ffe75a10bd7c71fe40c"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.666974 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" event={"ID":"8c1203d5-c663-4f5a-96b5-ca6b398114bd","Type":"ContainerStarted","Data":"c341e52c858b0db7cb7c1b4c9def848f6f9af19708a507085f401d69861b3215"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.672543 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-ns797" podStartSLOduration=9.672521745 podStartE2EDuration="9.672521745s" podCreationTimestamp="2025-12-10 06:50:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.672186716 +0000 UTC m=+144.398852032" watchObservedRunningTime="2025-12-10 06:50:24.672521745 +0000 UTC m=+144.399187071"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.686406 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6shb9" event={"ID":"25273950-0e4d-409a-8387-b571c2b15a05","Type":"ContainerStarted","Data":"7d3b26d7fdd8c46117cd09ccba8333e76b3b273d8eaf1d54745c8f9d3b3c7fcc"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.687547 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:24 crc kubenswrapper[4765]: E1210 06:50:24.688910 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:25.188890934 +0000 UTC m=+144.915556250 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.731722 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" event={"ID":"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7","Type":"ContainerStarted","Data":"fce47ce93605c06c49607d7742e17c9fde7fdd0cd477e6983125a55251921fe6"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.731766 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" event={"ID":"4c3435f8-cf1f-4103-bdf6-8da6f0f4f9c7","Type":"ContainerStarted","Data":"48e5d84717fce1f2afb7f9a066204b048cb5433c974d9315dc6ae8be23ecbf3b"}
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.733257 4765 patch_prober.go:28] interesting pod/downloads-7954f5f757-lxxm6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body=
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.733308 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lxxm6" podUID="7b597651-9700-4c4b-9d2d-c21dc37c1959" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.749075 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.750344 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4" podStartSLOduration=124.750323186 podStartE2EDuration="2m4.750323186s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.719970205 +0000 UTC m=+144.446635531" watchObservedRunningTime="2025-12-10 06:50:24.750323186 +0000 UTC m=+144.476988502"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.751344 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-98pgv"]
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.752482 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-98pgv"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.757626 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-98pgv"]
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.758040 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.768464 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nqr5d" podStartSLOduration=124.768447904 podStartE2EDuration="2m4.768447904s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.767740284 +0000 UTC m=+144.494405630" watchObservedRunningTime="2025-12-10 06:50:24.768447904 +0000 UTC m=+144.495113220"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.793945 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-catalog-content\") pod \"redhat-operators-98pgv\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " pod="openshift-marketplace/redhat-operators-98pgv"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.794555 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-utilities\") pod \"redhat-operators-98pgv\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " pod="openshift-marketplace/redhat-operators-98pgv"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.794750 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrtnk\" (UniqueName: \"kubernetes.io/projected/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-kube-api-access-hrtnk\") pod \"redhat-operators-98pgv\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " pod="openshift-marketplace/redhat-operators-98pgv"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.794945 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.812061 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-d8jqz" podStartSLOduration=124.812040666 podStartE2EDuration="2m4.812040666s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.809465784 +0000 UTC m=+144.536131100" watchObservedRunningTime="2025-12-10 06:50:24.812040666 +0000 UTC m=+144.538705992"
Dec 10 06:50:24 crc kubenswrapper[4765]: E1210 06:50:24.821645 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:25.321630245 +0000 UTC m=+145.048295561 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.840394 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.869422 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 06:50:24 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld
Dec 10 06:50:24 crc kubenswrapper[4765]: [+]process-running ok
Dec 10 06:50:24 crc kubenswrapper[4765]: healthz check failed
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.869491 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.892057 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-c8b5s" podStartSLOduration=124.892039569 podStartE2EDuration="2m4.892039569s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.847704956 +0000 UTC m=+144.574370272" watchObservedRunningTime="2025-12-10 06:50:24.892039569 +0000 UTC m=+144.618704885"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.892239 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" podStartSLOduration=123.892233664 podStartE2EDuration="2m3.892233664s" podCreationTimestamp="2025-12-10 06:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.8903214 +0000 UTC m=+144.616986716" watchObservedRunningTime="2025-12-10 06:50:24.892233664 +0000 UTC m=+144.618898980"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.913057 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.913252 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrtnk\" (UniqueName: \"kubernetes.io/projected/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-kube-api-access-hrtnk\") pod \"redhat-operators-98pgv\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " pod="openshift-marketplace/redhat-operators-98pgv"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.913343 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-catalog-content\") pod \"redhat-operators-98pgv\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " pod="openshift-marketplace/redhat-operators-98pgv"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.913373 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-utilities\") pod \"redhat-operators-98pgv\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " pod="openshift-marketplace/redhat-operators-98pgv"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.913868 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-utilities\") pod \"redhat-operators-98pgv\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " pod="openshift-marketplace/redhat-operators-98pgv"
Dec 10 06:50:24 crc kubenswrapper[4765]: E1210 06:50:24.913944 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:25.413929392 +0000 UTC m=+145.140594708 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.914486 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-catalog-content\") pod \"redhat-operators-98pgv\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " pod="openshift-marketplace/redhat-operators-98pgv"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.943420 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrtnk\" (UniqueName: \"kubernetes.io/projected/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-kube-api-access-hrtnk\") pod \"redhat-operators-98pgv\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " pod="openshift-marketplace/redhat-operators-98pgv"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.966451 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-m9mvq" podStartSLOduration=124.966430414 podStartE2EDuration="2m4.966430414s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:24.923385897 +0000 UTC m=+144.650051223" watchObservedRunningTime="2025-12-10 06:50:24.966430414 +0000 UTC m=+144.693095730"
Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.967905 4765 kubelet.go:2421]
"SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vcct8"] Dec 10 06:50:24 crc kubenswrapper[4765]: I1210 06:50:24.989660 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.047788 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.048593 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:25.548572327 +0000 UTC m=+145.275237643 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.120668 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vcct8"] Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.125233 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-98pgv" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.152719 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.153064 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx5dg\" (UniqueName: \"kubernetes.io/projected/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-kube-api-access-xx5dg\") pod \"redhat-operators-vcct8\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.153115 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-catalog-content\") pod \"redhat-operators-vcct8\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.153168 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-utilities\") pod \"redhat-operators-vcct8\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.153290 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:25.653274271 +0000 UTC m=+145.379939587 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.254522 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx5dg\" (UniqueName: \"kubernetes.io/projected/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-kube-api-access-xx5dg\") pod \"redhat-operators-vcct8\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.254583 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-catalog-content\") pod \"redhat-operators-vcct8\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.254616 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.254693 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-utilities\") pod \"redhat-operators-vcct8\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.261804 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-utilities\") pod \"redhat-operators-vcct8\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.262279 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:25.762243806 +0000 UTC m=+145.488909122 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.265550 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-catalog-content\") pod \"redhat-operators-vcct8\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.290254 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx5dg\" (UniqueName: \"kubernetes.io/projected/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-kube-api-access-xx5dg\") pod \"redhat-operators-vcct8\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.337413 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mklh7"] Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.361142 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.362012 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:25.861979502 +0000 UTC m=+145.588644818 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.362240 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.362903 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:25.862887007 +0000 UTC m=+145.589552323 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.363122 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:50:25 crc kubenswrapper[4765]: W1210 06:50:25.371255 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d0a35e0_faa1_4efe_9861_75f4d1fa7f55.slice/crio-ec8d4b9245abe845851ff6082af6bb369c6b11cb884d254afb23c6e43dd3b71c WatchSource:0}: Error finding container ec8d4b9245abe845851ff6082af6bb369c6b11cb884d254afb23c6e43dd3b71c: Status 404 returned error can't find the container with id ec8d4b9245abe845851ff6082af6bb369c6b11cb884d254afb23c6e43dd3b71c Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.463802 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.464336 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:25.964319971 +0000 UTC m=+145.690985287 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.508045 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cqthw"] Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.564995 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.565506 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.065483766 +0000 UTC m=+145.792149242 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.666113 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.666623 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.166595651 +0000 UTC m=+145.893260967 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.666732 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.667064 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.167052724 +0000 UTC m=+145.893718040 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.764348 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mklh7" event={"ID":"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55","Type":"ContainerStarted","Data":"90d338389c09afdf6d96d3d9009493536090262178181702fe07f14859495957"} Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.764395 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mklh7" event={"ID":"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55","Type":"ContainerStarted","Data":"ec8d4b9245abe845851ff6082af6bb369c6b11cb884d254afb23c6e43dd3b71c"} Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.768752 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.768876 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.268844607 +0000 UTC m=+145.995509923 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.769203 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.769606 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.269590458 +0000 UTC m=+145.996255784 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.804550 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7p95x" event={"ID":"91562083-eeea-41be-8455-e5fb3ff57453","Type":"ContainerStarted","Data":"bc069e2153f58bfd986f771fc5be6c8168ea2a8ea03238e1c34496932193df65"} Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.822635 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cqthw" event={"ID":"2a320eac-17e0-42a1-8e4d-3f1c87a72e92","Type":"ContainerStarted","Data":"8f07080714043f9b75c0886fac0d25dc79f17bcbb5180b601f2cbd25a04a4cb0"} Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.851964 4765 generic.go:334] "Generic (PLEG): container finished" podID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerID="db2d6231b064740e710375bed0966611867fbcc45c43b2ef15b6b357cdbc1ba6" exitCode=0 Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.852070 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9jw8" event={"ID":"a7909736-a9e6-4791-942b-31997fe6d3ee","Type":"ContainerDied","Data":"db2d6231b064740e710375bed0966611867fbcc45c43b2ef15b6b357cdbc1ba6"} Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.859190 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:25 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:25 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:25 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.859413 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.870038 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.870450 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.370436985 +0000 UTC m=+146.097102301 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.889015 4765 generic.go:334] "Generic (PLEG): container finished" podID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerID="4dc1e9d9c5f42658598a387e8485de5b11251fc44a437e692ddaae30a46e93f5" exitCode=0 Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.889159 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9xg4" event={"ID":"2f0fa6f1-2039-4003-88b8-d0fccce70b29","Type":"ContainerDied","Data":"4dc1e9d9c5f42658598a387e8485de5b11251fc44a437e692ddaae30a46e93f5"} Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.919908 4765 generic.go:334] "Generic (PLEG): container finished" podID="7b4761b3-3632-4441-897d-4ab2635b7630" containerID="3a8c2fb53ede4e46a9ea869a26ba912719d8b080e6ef9dc7c10fb16b9ea3d78d" exitCode=0 Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.919993 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k44ps" event={"ID":"7b4761b3-3632-4441-897d-4ab2635b7630","Type":"ContainerDied","Data":"3a8c2fb53ede4e46a9ea869a26ba912719d8b080e6ef9dc7c10fb16b9ea3d78d"} Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.947696 4765 generic.go:334] "Generic (PLEG): container finished" podID="c073bf0a-0806-4af1-9902-a0fe221901be" containerID="8e8fdb26ab0e1d21588cf1c79f7bc39952df3bbd457a0da5889c84d7fde28480" exitCode=0 Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.948673 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzrqt" event={"ID":"c073bf0a-0806-4af1-9902-a0fe221901be","Type":"ContainerDied","Data":"8e8fdb26ab0e1d21588cf1c79f7bc39952df3bbd457a0da5889c84d7fde28480"} Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.954572 4765 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bkcmk container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.954613 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" podUID="7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.956800 4765 patch_prober.go:28] interesting pod/downloads-7954f5f757-lxxm6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body= Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.956934 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lxxm6" podUID="7b597651-9700-4c4b-9d2d-c21dc37c1959" containerName="download-server" probeResult="failure" output="Get 
\"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.976327 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:25 crc kubenswrapper[4765]: E1210 06:50:25.977261 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.477219668 +0000 UTC m=+146.203884984 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.985406 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-98pgv"] Dec 10 06:50:25 crc kubenswrapper[4765]: I1210 06:50:25.990336 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-49sgb" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.054799 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vcct8"] Dec 10 06:50:26 crc kubenswrapper[4765]: W1210 06:50:26.064542 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5fd4051_3069_4ca7_829d_ad57d3d50c4c.slice/crio-44c2c12afdd78c5b60915bbf4f47b43a7bae89ae6f596a292b499c0495074d5a WatchSource:0}: Error finding container 44c2c12afdd78c5b60915bbf4f47b43a7bae89ae6f596a292b499c0495074d5a: Status 404 returned error can't find the container with id 44c2c12afdd78c5b60915bbf4f47b43a7bae89ae6f596a292b499c0495074d5a Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.081907 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.082181 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.582131829 +0000 UTC m=+146.308797155 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.083147 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.099573 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.599554997 +0000 UTC m=+146.326220503 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.185804 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.186360 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.68633148 +0000 UTC m=+146.412996796 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.288114 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.288680 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.788663228 +0000 UTC m=+146.515328554 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.389781 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.390111 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.390161 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.390221 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.404353 4765 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.9043239 +0000 UTC m=+146.630989216 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.410889 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.420663 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.491915 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.491966 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.492385 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:26.992366408 +0000 UTC m=+146.719031724 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.508890 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.594126 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.601664 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.101638881 +0000 UTC m=+146.828304187 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.641829 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.696683 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.697139 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.197122858 +0000 UTC m=+146.923788174 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.702164 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.709418 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.797980 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.798218 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.298185981 +0000 UTC m=+147.024851297 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.798298 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.798934 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.298923422 +0000 UTC m=+147.025588738 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.851249 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:26 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:26 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:26 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.851329 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.900118 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 06:50:26 crc kubenswrapper[4765]: E1210 06:50:26.900539 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.40052146 +0000 UTC m=+147.127186776 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.910385 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.979574 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcct8" event={"ID":"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994","Type":"ContainerStarted","Data":"39ff5c7158924561f24cbb6fc8c858771320408d5c937ae902eaa8910fa73e0e"} Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.988898 4765 generic.go:334] "Generic (PLEG): container finished" podID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" containerID="933300b6612795603cbfd9a376c5db2c937426ed3564aa145674dedfb58ebe88" exitCode=0 Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.989010 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cqthw" event={"ID":"2a320eac-17e0-42a1-8e4d-3f1c87a72e92","Type":"ContainerDied","Data":"933300b6612795603cbfd9a376c5db2c937426ed3564aa145674dedfb58ebe88"} Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.995850 4765 generic.go:334] "Generic (PLEG): container finished" podID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerID="90d338389c09afdf6d96d3d9009493536090262178181702fe07f14859495957" exitCode=0 Dec 10 06:50:26 crc kubenswrapper[4765]: I1210 06:50:26.995928 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mklh7" event={"ID":"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55","Type":"ContainerDied","Data":"90d338389c09afdf6d96d3d9009493536090262178181702fe07f14859495957"} Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.002468 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.002862 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.502839938 +0000 UTC m=+147.229505274 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.011144 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-98pgv" event={"ID":"c5fd4051-3069-4ca7-829d-ad57d3d50c4c","Type":"ContainerStarted","Data":"44c2c12afdd78c5b60915bbf4f47b43a7bae89ae6f596a292b499c0495074d5a"}
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.025526 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk"
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.057649 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z"
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.057696 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z"
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.071682 4765 patch_prober.go:28] interesting pod/apiserver-76f77b778f-gvn7z container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]log ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]etcd ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/start-apiserver-admission-initializer ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/generic-apiserver-start-informers ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/max-in-flight-filter ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/storage-object-count-tracker-hook ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/project.openshift.io-projectcache ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/openshift.io-startinformers ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/openshift.io-restmapperupdater ok
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Dec 10 06:50:27 crc kubenswrapper[4765]: livez check failed
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.071775 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z" podUID="6d03cd7a-f608-45b5-901c-b01678e4b69a" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.106802 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.107934 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.607911833 +0000 UTC m=+147.334577149 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.108600 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.119912 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.619893259 +0000 UTC m=+147.346558585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.221982 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.222176 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.722153535 +0000 UTC m=+147.448818851 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.222347 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.222626 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.722618148 +0000 UTC m=+147.449283464 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.329191 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.330135 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.830109792 +0000 UTC m=+147.556775108 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.330184 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.330561 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.830551424 +0000 UTC m=+147.557216750 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.431141 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.431648 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:27.931622857 +0000 UTC m=+147.658288263 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.521801 4765 patch_prober.go:28] interesting pod/downloads-7954f5f757-lxxm6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body=
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.521853 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lxxm6" podUID="7b597651-9700-4c4b-9d2d-c21dc37c1959" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused"
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.521959 4765 patch_prober.go:28] interesting pod/downloads-7954f5f757-lxxm6 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body=
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.522013 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-lxxm6" podUID="7b597651-9700-4c4b-9d2d-c21dc37c1959" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused"
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.532999 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.533404 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.03338935 +0000 UTC m=+147.760054666 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: W1210 06:50:27.602175 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-a333f09e22ed63f3d24bed3408bf0cb3d1249b2af8c46e280eede01e90eb57bd WatchSource:0}: Error finding container a333f09e22ed63f3d24bed3408bf0cb3d1249b2af8c46e280eede01e90eb57bd: Status 404 returned error can't find the container with id a333f09e22ed63f3d24bed3408bf0cb3d1249b2af8c46e280eede01e90eb57bd
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.634902 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.635068 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.135037618 +0000 UTC m=+147.861702934 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.635228 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.635634 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.135624315 +0000 UTC m=+147.862289631 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.736711 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.736815 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.236799001 +0000 UTC m=+147.963464317 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.737109 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.737411 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.237401828 +0000 UTC m=+147.964067144 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.838975 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.839161 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.339133209 +0000 UTC m=+148.065798525 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.839804 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.840305 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.340273961 +0000 UTC m=+148.066939277 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.846288 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-svfhs"
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.849550 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 06:50:27 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld
Dec 10 06:50:27 crc kubenswrapper[4765]: [+]process-running ok
Dec 10 06:50:27 crc kubenswrapper[4765]: healthz check failed
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.849612 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.940704 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.940911 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.440881861 +0000 UTC m=+148.167547177 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:27 crc kubenswrapper[4765]: I1210 06:50:27.941556 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:27 crc kubenswrapper[4765]: E1210 06:50:27.944197 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.444179144 +0000 UTC m=+148.170844460 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.033560 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"1e02b2c3f79ff709ac403d67c4e396f7df84ac2bc108a78e2def170d76e5a85a"}
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.033617 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"38f4dcad4ff4c1d580f638313bb3ac63a22b84e8cc25be14e545a8caf5cfb48c"}
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.033786 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.038776 4765 generic.go:334] "Generic (PLEG): container finished" podID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerID="7c56056fdc1bba00b03e97dd39668e9c9f4f3f60a7a73ae09223432ff41471b1" exitCode=0
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.038857 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcct8" event={"ID":"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994","Type":"ContainerDied","Data":"7c56056fdc1bba00b03e97dd39668e9c9f4f3f60a7a73ae09223432ff41471b1"}
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.041120 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ca975783c1ea0408de51b628dcc39aa827e5cac2899f4be499bcb138f0d740f7"}
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.041163 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f2f16783f064c61c5d09deb29b403c5aeef3b65a9753e964687330bf0f40a14c"}
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.042556 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.042841 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.542822039 +0000 UTC m=+148.269487355 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.042898 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.043323 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.543311863 +0000 UTC m=+148.269977359 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.044879 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"1688e0060b59153346e1e9701f80682ed8120e672329f05d876c60b5ea4eee59"}
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.044932 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a333f09e22ed63f3d24bed3408bf0cb3d1249b2af8c46e280eede01e90eb57bd"}
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.057465 4765 generic.go:334] "Generic (PLEG): container finished" podID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerID="93bae6ae77af242f8e388001813976b5af5d11e88b7997698b4bc955123d7de0" exitCode=0
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.057822 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-98pgv" event={"ID":"c5fd4051-3069-4ca7-829d-ad57d3d50c4c","Type":"ContainerDied","Data":"93bae6ae77af242f8e388001813976b5af5d11e88b7997698b4bc955123d7de0"}
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.144185 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.145381 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.645364793 +0000 UTC m=+148.372030109 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.246013 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.246715 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.746683003 +0000 UTC m=+148.473348319 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.348171 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.348645 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.848622741 +0000 UTC m=+148.575288067 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.348899 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.349547 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.849530586 +0000 UTC m=+148.576195902 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.362846 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-fv6zj"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.363512 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-fv6zj"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.367956 4765 patch_prober.go:28] interesting pod/console-f9d7485db-fv6zj container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body=
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.368022 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-fv6zj" podUID="2d2049d7-de64-4070-959f-8cefd1f15e5d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.378950 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.379021 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.385717 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.439266 4765 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.450727 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.451015 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.95098434 +0000 UTC m=+148.677649656 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.451188 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.451659 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:28.951639369 +0000 UTC m=+148.678304695 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.553784 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:29.053758741 +0000 UTC m=+148.780424057 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.553668 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.554270 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.554611 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:29.054601635 +0000 UTC m=+148.781267031 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.655501 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.655641 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 06:50:29.155613206 +0000 UTC m=+148.882278522 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.656326 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:28 crc kubenswrapper[4765]: E1210 06:50:28.657307 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 06:50:29.157293104 +0000 UTC m=+148.883958420 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wjlkl" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.740887 4765 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-10T06:50:28.439299883Z","Handler":null,"Name":""}
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.743894 4765 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.743927 4765 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.758563 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.764295 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.853046 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 06:50:28 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld
Dec 10 06:50:28 crc kubenswrapper[4765]: [+]process-running ok
Dec 10 06:50:28 crc kubenswrapper[4765]: healthz check failed
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.853119 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.866614 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.875368 4765 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.875448 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.914345 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wjlkl\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.928765 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.940891 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.943473 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.946204 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.947823 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 10 06:50:28 crc kubenswrapper[4765]: I1210 06:50:28.949834 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.075877 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7p95x" event={"ID":"91562083-eeea-41be-8455-e5fb3ff57453","Type":"ContainerStarted","Data":"33e214b91de580dec1bfca4bbd2808f05bd6107940355a51f2bfe50fbdf158e2"}
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.075988 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7p95x" event={"ID":"91562083-eeea-41be-8455-e5fb3ff57453","Type":"ContainerStarted","Data":"1c3362811e68bb3592deaab79418354b664c70158566f782aa1df831004c94f0"}
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.080768 4765 generic.go:334] "Generic (PLEG): container finished" podID="c444ed13-88e9-41b6-a7d6-c1fa51cb7e01" containerID="8b3c9917c429f0d653cc4a6fab177b0a187ef6e29fed5e5b430fe485fe946084" exitCode=0
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.080892 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" event={"ID":"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01","Type":"ContainerDied","Data":"8b3c9917c429f0d653cc4a6fab177b0a187ef6e29fed5e5b430fe485fe946084"}
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.082112 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c219b65-de82-4607-bee6-36d3b530106e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2c219b65-de82-4607-bee6-36d3b530106e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.082156 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c219b65-de82-4607-bee6-36d3b530106e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2c219b65-de82-4607-bee6-36d3b530106e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.088005 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nshv4"
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.190320 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c219b65-de82-4607-bee6-36d3b530106e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2c219b65-de82-4607-bee6-36d3b530106e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.190449 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c219b65-de82-4607-bee6-36d3b530106e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2c219b65-de82-4607-bee6-36d3b530106e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.190904 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c219b65-de82-4607-bee6-36d3b530106e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2c219b65-de82-4607-bee6-36d3b530106e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.249181 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c219b65-de82-4607-bee6-36d3b530106e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2c219b65-de82-4607-bee6-36d3b530106e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.318443 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.504246 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wjlkl"]
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.693048 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 10 06:50:29 crc kubenswrapper[4765]: W1210 06:50:29.747975 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod2c219b65_de82_4607_bee6_36d3b530106e.slice/crio-819d0663670d69356711e3bf1066f746aedc8eed6b7415fec4dac4da601f46db WatchSource:0}: Error finding container 819d0663670d69356711e3bf1066f746aedc8eed6b7415fec4dac4da601f46db: Status 404 returned error can't find the container with id 819d0663670d69356711e3bf1066f746aedc8eed6b7415fec4dac4da601f46db
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.850178 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 06:50:29 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld
Dec 10 06:50:29 crc kubenswrapper[4765]: [+]process-running ok
Dec 10 06:50:29 crc kubenswrapper[4765]: healthz check failed
Dec 10 06:50:29 crc kubenswrapper[4765]: I1210 06:50:29.850247 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.133959 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7p95x" event={"ID":"91562083-eeea-41be-8455-e5fb3ff57453","Type":"ContainerStarted","Data":"6c8189ed5f64da2640c8c54bf912e3b83a7960e221afb71d9901f06ccd8b6b20"}
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.139041 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" event={"ID":"83cb65fc-a542-4331-80d2-2ebccf5d2bff","Type":"ContainerStarted","Data":"7ec0818cdc0bdffab64d3d361356e88faec9ba357551a7363890c77de002dc8f"}
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.139131 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" event={"ID":"83cb65fc-a542-4331-80d2-2ebccf5d2bff","Type":"ContainerStarted","Data":"b95ac3ba41ca52876493524c9fc9f60034cd2019c68bdf525ff4d19d648dfd20"}
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.139450 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.141590 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2c219b65-de82-4607-bee6-36d3b530106e","Type":"ContainerStarted","Data":"819d0663670d69356711e3bf1066f746aedc8eed6b7415fec4dac4da601f46db"}
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.159146 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-7p95x" podStartSLOduration=15.159125042 podStartE2EDuration="15.159125042s" podCreationTimestamp="2025-12-10 06:50:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:30.155405008 +0000 UTC m=+149.882070324" watchObservedRunningTime="2025-12-10 06:50:30.159125042 +0000 UTC m=+149.885790358"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.403534 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.420762 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" podStartSLOduration=130.420743166 podStartE2EDuration="2m10.420743166s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:50:30.184947946 +0000 UTC m=+149.911613292" watchObservedRunningTime="2025-12-10 06:50:30.420743166 +0000 UTC m=+150.147408482"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.529848 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-secret-volume\") pod \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") "
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.530042 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-config-volume\") pod \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") "
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.530112 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pswj7\" (UniqueName: \"kubernetes.io/projected/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-kube-api-access-pswj7\") pod \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\" (UID: \"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01\") "
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.531701 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-config-volume" (OuterVolumeSpecName: "config-volume") pod "c444ed13-88e9-41b6-a7d6-c1fa51cb7e01" (UID: "c444ed13-88e9-41b6-a7d6-c1fa51cb7e01"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.544501 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c444ed13-88e9-41b6-a7d6-c1fa51cb7e01" (UID: "c444ed13-88e9-41b6-a7d6-c1fa51cb7e01"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.562579 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-kube-api-access-pswj7" (OuterVolumeSpecName: "kube-api-access-pswj7") pod "c444ed13-88e9-41b6-a7d6-c1fa51cb7e01" (UID: "c444ed13-88e9-41b6-a7d6-c1fa51cb7e01"). InnerVolumeSpecName "kube-api-access-pswj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.611444 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.631743 4765 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-config-volume\") on node \"crc\" DevicePath \"\""
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.631790 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pswj7\" (UniqueName: \"kubernetes.io/projected/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-kube-api-access-pswj7\") on node \"crc\" DevicePath \"\""
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.631805 4765 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.816230 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 10 06:50:30 crc kubenswrapper[4765]: E1210 06:50:30.816588 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c444ed13-88e9-41b6-a7d6-c1fa51cb7e01" containerName="collect-profiles"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.816618 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c444ed13-88e9-41b6-a7d6-c1fa51cb7e01" containerName="collect-profiles"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.816813 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c444ed13-88e9-41b6-a7d6-c1fa51cb7e01" containerName="collect-profiles"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.817393 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.827621 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.827992 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.831543 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.850192 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 06:50:30 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld
Dec 10 06:50:30 crc kubenswrapper[4765]: [+]process-running ok
Dec 10 06:50:30 crc kubenswrapper[4765]: healthz check failed
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.850278 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.935594 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/81fb3942-2175-4415-83ab-1ecc3ec73745-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"81fb3942-2175-4415-83ab-1ecc3ec73745\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 06:50:30 crc kubenswrapper[4765]: I1210 06:50:30.936014 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/81fb3942-2175-4415-83ab-1ecc3ec73745-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"81fb3942-2175-4415-83ab-1ecc3ec73745\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.037305 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/81fb3942-2175-4415-83ab-1ecc3ec73745-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"81fb3942-2175-4415-83ab-1ecc3ec73745\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.037415 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/81fb3942-2175-4415-83ab-1ecc3ec73745-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"81fb3942-2175-4415-83ab-1ecc3ec73745\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.037499 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/81fb3942-2175-4415-83ab-1ecc3ec73745-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"81fb3942-2175-4415-83ab-1ecc3ec73745\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.082063 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/81fb3942-2175-4415-83ab-1ecc3ec73745-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"81fb3942-2175-4415-83ab-1ecc3ec73745\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.162253 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.169033 4765 generic.go:334] "Generic (PLEG): container finished" podID="2c219b65-de82-4607-bee6-36d3b530106e" containerID="6d6b24964ac064a2894569c62b8e499ca08c1af34d3aa2d27594c65b93a9da89" exitCode=0
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.169199 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2c219b65-de82-4607-bee6-36d3b530106e","Type":"ContainerDied","Data":"6d6b24964ac064a2894569c62b8e499ca08c1af34d3aa2d27594c65b93a9da89"}
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.242185 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm"
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.242649 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm" event={"ID":"c444ed13-88e9-41b6-a7d6-c1fa51cb7e01","Type":"ContainerDied","Data":"2e1c516b8c189971fcae4df7c5359edd90c4d77d14564a8cb4919b0597cd18dc"}
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.242673 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e1c516b8c189971fcae4df7c5359edd90c4d77d14564a8cb4919b0597cd18dc"
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.853598 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 06:50:31 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld
Dec 10 06:50:31 crc kubenswrapper[4765]: [+]process-running ok
Dec 10 06:50:31 crc kubenswrapper[4765]: healthz check failed
Dec 10 06:50:31 crc kubenswrapper[4765]: I1210 06:50:31.853674 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 06:50:32 crc kubenswrapper[4765]: I1210 06:50:32.057597 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z"
Dec 10 06:50:32 crc kubenswrapper[4765]: I1210 06:50:32.063345 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-gvn7z"
Dec 10 06:50:32 crc kubenswrapper[4765]: I1210 06:50:32.849173 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 06:50:32 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld
Dec 10 06:50:32 crc kubenswrapper[4765]: [+]process-running ok
Dec 10 06:50:32 crc kubenswrapper[4765]: healthz check failed
Dec 10 06:50:32 crc
kubenswrapper[4765]: I1210 06:50:32.849780 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:33 crc kubenswrapper[4765]: I1210 06:50:33.710134 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-ns797" Dec 10 06:50:33 crc kubenswrapper[4765]: I1210 06:50:33.847993 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:33 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:33 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:33 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:33 crc kubenswrapper[4765]: I1210 06:50:33.848078 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:34 crc kubenswrapper[4765]: I1210 06:50:34.049952 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 06:50:34 crc kubenswrapper[4765]: I1210 06:50:34.050252 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 06:50:34 crc kubenswrapper[4765]: I1210 06:50:34.848388 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:34 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:34 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:34 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:34 crc kubenswrapper[4765]: I1210 06:50:34.848666 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:35 crc kubenswrapper[4765]: I1210 06:50:35.848685 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:35 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:35 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:35 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:35 crc kubenswrapper[4765]: I1210 06:50:35.848735 4765 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:36 crc kubenswrapper[4765]: I1210 06:50:36.850918 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:36 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:36 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:36 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:36 crc kubenswrapper[4765]: I1210 06:50:36.851066 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:37 crc kubenswrapper[4765]: I1210 06:50:37.526945 4765 patch_prober.go:28] interesting pod/downloads-7954f5f757-lxxm6 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body= Dec 10 06:50:37 crc kubenswrapper[4765]: I1210 06:50:37.527000 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-lxxm6" podUID="7b597651-9700-4c4b-9d2d-c21dc37c1959" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" Dec 10 06:50:37 crc kubenswrapper[4765]: I1210 06:50:37.527184 4765 patch_prober.go:28] interesting pod/downloads-7954f5f757-lxxm6 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body= Dec 10 06:50:37 crc kubenswrapper[4765]: I1210 06:50:37.527242 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-lxxm6" podUID="7b597651-9700-4c4b-9d2d-c21dc37c1959" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.23:8080/\": dial tcp 10.217.0.23:8080: connect: connection refused" Dec 10 06:50:37 crc kubenswrapper[4765]: I1210 06:50:37.849180 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:37 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:37 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:37 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:37 crc kubenswrapper[4765]: I1210 06:50:37.849527 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:38 crc kubenswrapper[4765]: I1210 06:50:38.363020 4765 patch_prober.go:28] interesting pod/console-f9d7485db-fv6zj container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection 
refused" start-of-body= Dec 10 06:50:38 crc kubenswrapper[4765]: I1210 06:50:38.363125 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-fv6zj" podUID="2d2049d7-de64-4070-959f-8cefd1f15e5d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" Dec 10 06:50:38 crc kubenswrapper[4765]: I1210 06:50:38.848169 4765 patch_prober.go:28] interesting pod/router-default-5444994796-svfhs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 06:50:38 crc kubenswrapper[4765]: [-]has-synced failed: reason withheld Dec 10 06:50:38 crc kubenswrapper[4765]: [+]process-running ok Dec 10 06:50:38 crc kubenswrapper[4765]: healthz check failed Dec 10 06:50:38 crc kubenswrapper[4765]: I1210 06:50:38.848261 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-svfhs" podUID="3ce9501a-7d19-42bf-94fc-b63427ef3c12" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 06:50:39 crc kubenswrapper[4765]: I1210 06:50:39.857275 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:39 crc kubenswrapper[4765]: I1210 06:50:39.860311 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-svfhs" Dec 10 06:50:41 crc kubenswrapper[4765]: I1210 06:50:41.975590 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:50:41 crc kubenswrapper[4765]: I1210 06:50:41.982256 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/efb71311-50ec-4765-8caf-6f2e02b8dce9-metrics-certs\") pod \"network-metrics-daemon-k9sld\" (UID: \"efb71311-50ec-4765-8caf-6f2e02b8dce9\") " pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:50:42 crc kubenswrapper[4765]: I1210 06:50:42.204927 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k9sld" Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.139261 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.258929 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c219b65-de82-4607-bee6-36d3b530106e-kubelet-dir\") pod \"2c219b65-de82-4607-bee6-36d3b530106e\" (UID: \"2c219b65-de82-4607-bee6-36d3b530106e\") " Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.259028 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2c219b65-de82-4607-bee6-36d3b530106e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2c219b65-de82-4607-bee6-36d3b530106e" (UID: "2c219b65-de82-4607-bee6-36d3b530106e"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.259087 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c219b65-de82-4607-bee6-36d3b530106e-kube-api-access\") pod \"2c219b65-de82-4607-bee6-36d3b530106e\" (UID: \"2c219b65-de82-4607-bee6-36d3b530106e\") " Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.259408 4765 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c219b65-de82-4607-bee6-36d3b530106e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.262451 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c219b65-de82-4607-bee6-36d3b530106e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2c219b65-de82-4607-bee6-36d3b530106e" (UID: "2c219b65-de82-4607-bee6-36d3b530106e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.360704 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c219b65-de82-4607-bee6-36d3b530106e-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.493190 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2c219b65-de82-4607-bee6-36d3b530106e","Type":"ContainerDied","Data":"819d0663670d69356711e3bf1066f746aedc8eed6b7415fec4dac4da601f46db"} Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.493235 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="819d0663670d69356711e3bf1066f746aedc8eed6b7415fec4dac4da601f46db" Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.493253 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 06:50:47 crc kubenswrapper[4765]: I1210 06:50:47.528600 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-lxxm6" Dec 10 06:50:48 crc kubenswrapper[4765]: I1210 06:50:48.365462 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:48 crc kubenswrapper[4765]: I1210 06:50:48.369995 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 06:50:48 crc kubenswrapper[4765]: I1210 06:50:48.935897 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:50:58 crc kubenswrapper[4765]: I1210 06:50:58.555149 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-s5sr5" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.397172 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 10 06:51:03 crc kubenswrapper[4765]: E1210 06:51:03.398224 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c219b65-de82-4607-bee6-36d3b530106e" containerName="pruner" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.398242 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c219b65-de82-4607-bee6-36d3b530106e" containerName="pruner" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.398357 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c219b65-de82-4607-bee6-36d3b530106e" containerName="pruner" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.398794 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.408947 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.528442 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"12e10e8d-19ff-459c-8215-2aeb1a493e4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.528525 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"12e10e8d-19ff-459c-8215-2aeb1a493e4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.629781 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"12e10e8d-19ff-459c-8215-2aeb1a493e4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.629866 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"12e10e8d-19ff-459c-8215-2aeb1a493e4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.630030 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"12e10e8d-19ff-459c-8215-2aeb1a493e4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.653203 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"12e10e8d-19ff-459c-8215-2aeb1a493e4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 06:51:03 crc kubenswrapper[4765]: I1210 06:51:03.722272 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 06:51:04 crc kubenswrapper[4765]: I1210 06:51:04.049148 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 06:51:04 crc kubenswrapper[4765]: I1210 06:51:04.049269 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 06:51:07 crc kubenswrapper[4765]: I1210 06:51:07.365069 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 06:51:08 crc kubenswrapper[4765]: E1210 06:51:08.090253 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 10 06:51:08 crc kubenswrapper[4765]: E1210 06:51:08.090860 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7spmh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-q9xg4_openshift-marketplace(2f0fa6f1-2039-4003-88b8-d0fccce70b29): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 06:51:08 crc kubenswrapper[4765]: E1210 06:51:08.092072 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" 
pod="openshift-marketplace/community-operators-q9xg4" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" Dec 10 06:51:08 crc kubenswrapper[4765]: I1210 06:51:08.800685 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 10 06:51:08 crc kubenswrapper[4765]: I1210 06:51:08.802372 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:08 crc kubenswrapper[4765]: I1210 06:51:08.805162 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 10 06:51:09 crc kubenswrapper[4765]: I1210 06:51:09.003278 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-var-lock\") pod \"installer-9-crc\" (UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:09 crc kubenswrapper[4765]: I1210 06:51:09.003355 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-kubelet-dir\") pod \"installer-9-crc\" (UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:09 crc kubenswrapper[4765]: I1210 06:51:09.003382 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eef505b2-623b-437c-8207-550da3a806ba-kube-api-access\") pod \"installer-9-crc\" (UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:09 crc kubenswrapper[4765]: I1210 06:51:09.104337 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-kubelet-dir\") pod \"installer-9-crc\" (UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:09 crc kubenswrapper[4765]: I1210 06:51:09.104384 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eef505b2-623b-437c-8207-550da3a806ba-kube-api-access\") pod \"installer-9-crc\" (UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:09 crc kubenswrapper[4765]: I1210 06:51:09.104479 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-var-lock\") pod \"installer-9-crc\" (UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:09 crc kubenswrapper[4765]: I1210 06:51:09.104475 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-kubelet-dir\") pod \"installer-9-crc\" (UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:09 crc kubenswrapper[4765]: I1210 06:51:09.104573 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-var-lock\") pod \"installer-9-crc\" (UID: 
\"eef505b2-623b-437c-8207-550da3a806ba\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:09 crc kubenswrapper[4765]: I1210 06:51:09.121899 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eef505b2-623b-437c-8207-550da3a806ba-kube-api-access\") pod \"installer-9-crc\" (UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:09 crc kubenswrapper[4765]: I1210 06:51:09.126673 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:12 crc kubenswrapper[4765]: E1210 06:51:12.251293 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-q9xg4" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" Dec 10 06:51:12 crc kubenswrapper[4765]: E1210 06:51:12.327235 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 10 06:51:12 crc kubenswrapper[4765]: E1210 06:51:12.327548 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xx5dg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-vcct8_openshift-marketplace(4f7e189b-f64e-4a06-b5ac-ec94f0f3c994): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 06:51:12 crc kubenswrapper[4765]: E1210 06:51:12.328732 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
canceled\"" pod="openshift-marketplace/redhat-operators-vcct8" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" Dec 10 06:51:12 crc kubenswrapper[4765]: E1210 06:51:12.482348 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 10 06:51:12 crc kubenswrapper[4765]: E1210 06:51:12.482500 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hrtnk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-98pgv_openshift-marketplace(c5fd4051-3069-4ca7-829d-ad57d3d50c4c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 06:51:12 crc kubenswrapper[4765]: E1210 06:51:12.483683 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-98pgv" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 06:51:13.223467 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-vcct8" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 06:51:13.223511 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-98pgv" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 
06:51:13.289475 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 06:51:13.289661 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t6w68,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-cqthw_openshift-marketplace(2a320eac-17e0-42a1-8e4d-3f1c87a72e92): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 06:51:13.290911 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-cqthw" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 06:51:13.310011 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 06:51:13.310171 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n8xss,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-dzrqt_openshift-marketplace(c073bf0a-0806-4af1-9902-a0fe221901be): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 06:51:13.311274 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-dzrqt" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 06:51:13.313492 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 06:51:13.313661 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tkgcm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-mklh7_openshift-marketplace(1d0a35e0-faa1-4efe-9861-75f4d1fa7f55): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 06:51:13 crc kubenswrapper[4765]: E1210 06:51:13.315558 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-mklh7" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.122939 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-mklh7" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.122984 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-cqthw" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.122983 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-dzrqt" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.243667 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.244197 4765 kuberuntime_manager.go:1274] "Unhandled 
Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6rvxf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-k44ps_openshift-marketplace(7b4761b3-3632-4441-897d-4ab2635b7630): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.245556 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-k44ps" podUID="7b4761b3-3632-4441-897d-4ab2635b7630" Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.285231 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.285370 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gvljv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-l9jw8_openshift-marketplace(a7909736-a9e6-4791-942b-31997fe6d3ee): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.286706 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-l9jw8" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee"
Dec 10 06:51:15 crc kubenswrapper[4765]: I1210 06:51:15.566855 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 10 06:51:15 crc kubenswrapper[4765]: I1210 06:51:15.575831 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-k9sld"]
Dec 10 06:51:15 crc kubenswrapper[4765]: I1210 06:51:15.669621 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k9sld" event={"ID":"efb71311-50ec-4765-8caf-6f2e02b8dce9","Type":"ContainerStarted","Data":"f9b3c9c666acebab5b72dbf1d07b756a55d7324bc3cf3cfc52b0dc56023ef79e"}
Dec 10 06:51:15 crc kubenswrapper[4765]: I1210 06:51:15.675361 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 10 06:51:15 crc kubenswrapper[4765]: I1210 06:51:15.675404 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 10 06:51:15 crc kubenswrapper[4765]: I1210 06:51:15.680970 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"81fb3942-2175-4415-83ab-1ecc3ec73745","Type":"ContainerStarted","Data":"e3cd9b958cbf9b4cec63ac9c3aa458f0f58712b3ca4cf92f5cf1e47c19472de6"}
Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.684253 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-k44ps" podUID="7b4761b3-3632-4441-897d-4ab2635b7630"
Dec 10 06:51:15 crc kubenswrapper[4765]: E1210 06:51:15.684334 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-l9jw8" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee"
Dec 10 06:51:15 crc kubenswrapper[4765]: W1210 06:51:15.697617 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod12e10e8d_19ff_459c_8215_2aeb1a493e4e.slice/crio-44b071ef79cee9e981e19f8123862166d866280f92af56d8c3ca3b101226cda8 WatchSource:0}: Error finding container 44b071ef79cee9e981e19f8123862166d866280f92af56d8c3ca3b101226cda8: Status 404 returned error can't find the container with id 44b071ef79cee9e981e19f8123862166d866280f92af56d8c3ca3b101226cda8
Dec 10 06:51:15 crc kubenswrapper[4765]: W1210 06:51:15.700445 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podeef505b2_623b_437c_8207_550da3a806ba.slice/crio-8db4ad127ffa99c981b4fb1daa77bf98f8179aecf60a946336eee50f1018d7df WatchSource:0}: Error finding container 8db4ad127ffa99c981b4fb1daa77bf98f8179aecf60a946336eee50f1018d7df: Status 404 returned error can't find the container with id 8db4ad127ffa99c981b4fb1daa77bf98f8179aecf60a946336eee50f1018d7df
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.700204 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k9sld" event={"ID":"efb71311-50ec-4765-8caf-6f2e02b8dce9","Type":"ContainerStarted","Data":"65610913a87242127ebb019135c3010b9f5e60ad4608c8292de95bf145a902e2"}
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.700761 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k9sld" event={"ID":"efb71311-50ec-4765-8caf-6f2e02b8dce9","Type":"ContainerStarted","Data":"620990889c38aabb52128b8c1fc797368763551a2450dd42a0ee9ed6eefcf6b3"}
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.702500 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"12e10e8d-19ff-459c-8215-2aeb1a493e4e","Type":"ContainerStarted","Data":"4d329e724011e8ab028a89fa3f921815ae44260aacdad0d92d069f3bc1a92dbf"}
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.702569 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"12e10e8d-19ff-459c-8215-2aeb1a493e4e","Type":"ContainerStarted","Data":"44b071ef79cee9e981e19f8123862166d866280f92af56d8c3ca3b101226cda8"}
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.703974 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"81fb3942-2175-4415-83ab-1ecc3ec73745","Type":"ContainerStarted","Data":"9c1e30b47304fa113674b8cd5dc636e1327e68e7739421533ff35cfc77814bfd"}
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.705750 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"eef505b2-623b-437c-8207-550da3a806ba","Type":"ContainerStarted","Data":"620c2d5d9055cc2556ee493712a2ac52b1b2bb899d8b00b1f5e3fddedd818fb7"}
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.705784 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"eef505b2-623b-437c-8207-550da3a806ba","Type":"ContainerStarted","Data":"8db4ad127ffa99c981b4fb1daa77bf98f8179aecf60a946336eee50f1018d7df"}
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.739965 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-k9sld" podStartSLOduration=176.739945526 podStartE2EDuration="2m56.739945526s" podCreationTimestamp="2025-12-10 06:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:51:16.717343802 +0000 UTC m=+196.444009118" watchObservedRunningTime="2025-12-10 06:51:16.739945526 +0000 UTC m=+196.466610842"
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.741793 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=8.741782218 podStartE2EDuration="8.741782218s" podCreationTimestamp="2025-12-10 06:51:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:51:16.736400907 +0000 UTC m=+196.463066233" watchObservedRunningTime="2025-12-10 06:51:16.741782218 +0000 UTC m=+196.468447534"
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.757864 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=13.757850028 podStartE2EDuration="13.757850028s" podCreationTimestamp="2025-12-10 06:51:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:51:16.75506581 +0000 UTC m=+196.481731136" watchObservedRunningTime="2025-12-10 06:51:16.757850028 +0000 UTC m=+196.484515344"
Dec 10 06:51:16 crc kubenswrapper[4765]: I1210 06:51:16.769499 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=46.769478024 podStartE2EDuration="46.769478024s" podCreationTimestamp="2025-12-10 06:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:51:16.768494656 +0000 UTC m=+196.495159972" watchObservedRunningTime="2025-12-10 06:51:16.769478024 +0000 UTC m=+196.496143360"
Dec 10 06:51:17 crc kubenswrapper[4765]: I1210 06:51:17.712347 4765 generic.go:334] "Generic (PLEG): container finished" podID="81fb3942-2175-4415-83ab-1ecc3ec73745" containerID="9c1e30b47304fa113674b8cd5dc636e1327e68e7739421533ff35cfc77814bfd" exitCode=0
Dec 10 06:51:17 crc kubenswrapper[4765]: I1210 06:51:17.712546 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"81fb3942-2175-4415-83ab-1ecc3ec73745","Type":"ContainerDied","Data":"9c1e30b47304fa113674b8cd5dc636e1327e68e7739421533ff35cfc77814bfd"}
Dec 10 06:51:17 crc kubenswrapper[4765]: I1210 06:51:17.715824 4765 generic.go:334] "Generic (PLEG): container finished" podID="12e10e8d-19ff-459c-8215-2aeb1a493e4e" containerID="4d329e724011e8ab028a89fa3f921815ae44260aacdad0d92d069f3bc1a92dbf" exitCode=0
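The ErrImagePull and ImagePullBackOff entries above show the kubelet cancelling and then backing off pulls of registry.redhat.io/redhat/certified-operator-index:v4.18 for the extract-content init containers of the certified-operators pods. A minimal sketch (not part of the log) of spotting pods stuck in that state from the API side, assuming the Python kubernetes client and a reachable kubeconfig; the namespace is taken from the entries above:

    from kubernetes import client, config

    config.load_kube_config()
    v1 = client.CoreV1Api()

    # Report any container (init or regular) waiting on an image-pull error.
    for pod in v1.list_namespaced_pod("openshift-marketplace").items:
        statuses = (pod.status.init_container_statuses or []) + (pod.status.container_statuses or [])
        for cs in statuses:
            waiting = cs.state.waiting if cs.state else None
            if waiting and waiting.reason in ("ErrImagePull", "ImagePullBackOff"):
                print(pod.metadata.name, cs.name, waiting.reason)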
pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"12e10e8d-19ff-459c-8215-2aeb1a493e4e","Type":"ContainerDied","Data":"4d329e724011e8ab028a89fa3f921815ae44260aacdad0d92d069f3bc1a92dbf"} Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.018646 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.022224 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.139554 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/81fb3942-2175-4415-83ab-1ecc3ec73745-kubelet-dir\") pod \"81fb3942-2175-4415-83ab-1ecc3ec73745\" (UID: \"81fb3942-2175-4415-83ab-1ecc3ec73745\") " Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.139628 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/81fb3942-2175-4415-83ab-1ecc3ec73745-kube-api-access\") pod \"81fb3942-2175-4415-83ab-1ecc3ec73745\" (UID: \"81fb3942-2175-4415-83ab-1ecc3ec73745\") " Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.139660 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kubelet-dir\") pod \"12e10e8d-19ff-459c-8215-2aeb1a493e4e\" (UID: \"12e10e8d-19ff-459c-8215-2aeb1a493e4e\") " Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.139676 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kube-api-access\") pod \"12e10e8d-19ff-459c-8215-2aeb1a493e4e\" (UID: \"12e10e8d-19ff-459c-8215-2aeb1a493e4e\") " Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.139710 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/81fb3942-2175-4415-83ab-1ecc3ec73745-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "81fb3942-2175-4415-83ab-1ecc3ec73745" (UID: "81fb3942-2175-4415-83ab-1ecc3ec73745"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.139964 4765 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/81fb3942-2175-4415-83ab-1ecc3ec73745-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.139998 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "12e10e8d-19ff-459c-8215-2aeb1a493e4e" (UID: "12e10e8d-19ff-459c-8215-2aeb1a493e4e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.145087 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81fb3942-2175-4415-83ab-1ecc3ec73745-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "81fb3942-2175-4415-83ab-1ecc3ec73745" (UID: "81fb3942-2175-4415-83ab-1ecc3ec73745"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.159293 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "12e10e8d-19ff-459c-8215-2aeb1a493e4e" (UID: "12e10e8d-19ff-459c-8215-2aeb1a493e4e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.241015 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/81fb3942-2175-4415-83ab-1ecc3ec73745-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.241266 4765 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.241274 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/12e10e8d-19ff-459c-8215-2aeb1a493e4e-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.727302 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"12e10e8d-19ff-459c-8215-2aeb1a493e4e","Type":"ContainerDied","Data":"44b071ef79cee9e981e19f8123862166d866280f92af56d8c3ca3b101226cda8"} Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.727371 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44b071ef79cee9e981e19f8123862166d866280f92af56d8c3ca3b101226cda8" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.727320 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.728752 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"81fb3942-2175-4415-83ab-1ecc3ec73745","Type":"ContainerDied","Data":"e3cd9b958cbf9b4cec63ac9c3aa458f0f58712b3ca4cf92f5cf1e47c19472de6"} Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.728868 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3cd9b958cbf9b4cec63ac9c3aa458f0f58712b3ca4cf92f5cf1e47c19472de6" Dec 10 06:51:19 crc kubenswrapper[4765]: I1210 06:51:19.728838 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 06:51:27 crc kubenswrapper[4765]: I1210 06:51:27.782813 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9xg4" event={"ID":"2f0fa6f1-2039-4003-88b8-d0fccce70b29","Type":"ContainerStarted","Data":"6c5cf3f5ee38ba12376accdedeb395de9733bae1a9a6defe6c20d9cb199c0f45"} Dec 10 06:51:28 crc kubenswrapper[4765]: I1210 06:51:28.788709 4765 generic.go:334] "Generic (PLEG): container finished" podID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerID="6c5cf3f5ee38ba12376accdedeb395de9733bae1a9a6defe6c20d9cb199c0f45" exitCode=0 Dec 10 06:51:28 crc kubenswrapper[4765]: I1210 06:51:28.788750 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9xg4" event={"ID":"2f0fa6f1-2039-4003-88b8-d0fccce70b29","Type":"ContainerDied","Data":"6c5cf3f5ee38ba12376accdedeb395de9733bae1a9a6defe6c20d9cb199c0f45"} Dec 10 06:51:34 crc kubenswrapper[4765]: I1210 06:51:34.049225 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 06:51:34 crc kubenswrapper[4765]: I1210 06:51:34.049598 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 06:51:34 crc kubenswrapper[4765]: I1210 06:51:34.050023 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:51:34 crc kubenswrapper[4765]: I1210 06:51:34.050776 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 06:51:34 crc kubenswrapper[4765]: I1210 06:51:34.050926 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe" gracePeriod=600 Dec 10 06:51:37 crc kubenswrapper[4765]: I1210 06:51:37.834000 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe" exitCode=0 Dec 10 06:51:37 crc kubenswrapper[4765]: I1210 06:51:37.834076 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe"} Dec 10 06:51:40 crc kubenswrapper[4765]: I1210 06:51:40.853226 4765 generic.go:334] "Generic (PLEG): container finished" podID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" 
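The machine-config-daemon sequence above is the kubelet's liveness-probe path end to end: patch_prober records the failed GET on http://127.0.0.1:8798/health, the probe worker marks the container unhealthy, and the runtime kills it with the pod's termination grace period (600s here) so it can be restarted. A sketch of an equivalent probe definition with the Python kubernetes client; the path and port come from the log, while the period and threshold are illustrative assumptions, not values read from the machine-config-daemon manifest:

    from kubernetes import client

    # HTTP liveness probe equivalent to the one failing above.
    liveness = client.V1Probe(
        http_get=client.V1HTTPGetAction(path="/health", port=8798),
        period_seconds=10,      # assumed, not from the log
        failure_threshold=3,    # assumed, not from the log
    )
    container = client.V1Container(
        name="machine-config-daemon",
        image="example.invalid/machine-config-daemon:latest",  # placeholder image
        liveness_probe=liveness,
    )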
containerID="91489819da3c4cd27a457ba82a733f6c7912a44f9193ad6e673aef29a766f552" exitCode=0 Dec 10 06:51:40 crc kubenswrapper[4765]: I1210 06:51:40.853682 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cqthw" event={"ID":"2a320eac-17e0-42a1-8e4d-3f1c87a72e92","Type":"ContainerDied","Data":"91489819da3c4cd27a457ba82a733f6c7912a44f9193ad6e673aef29a766f552"} Dec 10 06:51:40 crc kubenswrapper[4765]: I1210 06:51:40.859373 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"7376f4bc9252f299181bb5fb7be283e9a1bce4b9e100c58bf80840e511f0752e"} Dec 10 06:51:40 crc kubenswrapper[4765]: I1210 06:51:40.861977 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9xg4" event={"ID":"2f0fa6f1-2039-4003-88b8-d0fccce70b29","Type":"ContainerStarted","Data":"33185da7922357082c832fb0ac239898c5cd6f5f50d8c867f56e121266916ebc"} Dec 10 06:51:40 crc kubenswrapper[4765]: I1210 06:51:40.864709 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mklh7" event={"ID":"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55","Type":"ContainerStarted","Data":"34a291d80ce76d65e373ab44e4c3ae603e5eec51de0da16b1321923a76fd36da"} Dec 10 06:51:40 crc kubenswrapper[4765]: I1210 06:51:40.866697 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-98pgv" event={"ID":"c5fd4051-3069-4ca7-829d-ad57d3d50c4c","Type":"ContainerStarted","Data":"4086ccb48734c6ed8c36be50924b59c9cb23ff942c2edce231a8f0602323f841"} Dec 10 06:51:40 crc kubenswrapper[4765]: I1210 06:51:40.869582 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcct8" event={"ID":"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994","Type":"ContainerStarted","Data":"9607fc725c18ff05f81fbdc6cd28582549c7ca82ab43e4ad7193dbe45fceee93"} Dec 10 06:51:40 crc kubenswrapper[4765]: I1210 06:51:40.910166 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q9xg4" podStartSLOduration=3.667030109 podStartE2EDuration="1m19.910145765s" podCreationTimestamp="2025-12-10 06:50:21 +0000 UTC" firstStartedPulling="2025-12-10 06:50:24.328291476 +0000 UTC m=+144.054956792" lastFinishedPulling="2025-12-10 06:51:40.571407132 +0000 UTC m=+220.298072448" observedRunningTime="2025-12-10 06:51:40.890820332 +0000 UTC m=+220.617485648" watchObservedRunningTime="2025-12-10 06:51:40.910145765 +0000 UTC m=+220.636811081" Dec 10 06:51:41 crc kubenswrapper[4765]: I1210 06:51:41.876561 4765 generic.go:334] "Generic (PLEG): container finished" podID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerID="4086ccb48734c6ed8c36be50924b59c9cb23ff942c2edce231a8f0602323f841" exitCode=0 Dec 10 06:51:41 crc kubenswrapper[4765]: I1210 06:51:41.876629 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-98pgv" event={"ID":"c5fd4051-3069-4ca7-829d-ad57d3d50c4c","Type":"ContainerDied","Data":"4086ccb48734c6ed8c36be50924b59c9cb23ff942c2edce231a8f0602323f841"} Dec 10 06:51:41 crc kubenswrapper[4765]: I1210 06:51:41.878791 4765 generic.go:334] "Generic (PLEG): container finished" podID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerID="9607fc725c18ff05f81fbdc6cd28582549c7ca82ab43e4ad7193dbe45fceee93" exitCode=0 Dec 10 06:51:41 crc kubenswrapper[4765]: I1210 
06:51:41.878846 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcct8" event={"ID":"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994","Type":"ContainerDied","Data":"9607fc725c18ff05f81fbdc6cd28582549c7ca82ab43e4ad7193dbe45fceee93"} Dec 10 06:51:41 crc kubenswrapper[4765]: I1210 06:51:41.881746 4765 generic.go:334] "Generic (PLEG): container finished" podID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerID="34a291d80ce76d65e373ab44e4c3ae603e5eec51de0da16b1321923a76fd36da" exitCode=0 Dec 10 06:51:41 crc kubenswrapper[4765]: I1210 06:51:41.881939 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mklh7" event={"ID":"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55","Type":"ContainerDied","Data":"34a291d80ce76d65e373ab44e4c3ae603e5eec51de0da16b1321923a76fd36da"} Dec 10 06:51:43 crc kubenswrapper[4765]: I1210 06:51:43.142867 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:51:43 crc kubenswrapper[4765]: I1210 06:51:43.142917 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:51:44 crc kubenswrapper[4765]: I1210 06:51:44.147601 4765 generic.go:334] "Generic (PLEG): container finished" podID="7b4761b3-3632-4441-897d-4ab2635b7630" containerID="36eb4ed04d06c558490818ab8829f39650f70921361da031541aec7dd70d62b2" exitCode=0 Dec 10 06:51:44 crc kubenswrapper[4765]: I1210 06:51:44.147795 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k44ps" event={"ID":"7b4761b3-3632-4441-897d-4ab2635b7630","Type":"ContainerDied","Data":"36eb4ed04d06c558490818ab8829f39650f70921361da031541aec7dd70d62b2"} Dec 10 06:51:44 crc kubenswrapper[4765]: I1210 06:51:44.153351 4765 generic.go:334] "Generic (PLEG): container finished" podID="c073bf0a-0806-4af1-9902-a0fe221901be" containerID="2cc19ae58a4f36e671e12b3b76273926b12bcb03a61a159a75f1ee914161f004" exitCode=0 Dec 10 06:51:44 crc kubenswrapper[4765]: I1210 06:51:44.153449 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzrqt" event={"ID":"c073bf0a-0806-4af1-9902-a0fe221901be","Type":"ContainerDied","Data":"2cc19ae58a4f36e671e12b3b76273926b12bcb03a61a159a75f1ee914161f004"} Dec 10 06:51:44 crc kubenswrapper[4765]: I1210 06:51:44.162582 4765 generic.go:334] "Generic (PLEG): container finished" podID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerID="8f12008bce00b371efa068a390d155a157414319e4353cbc87cc675aa03e1576" exitCode=0 Dec 10 06:51:44 crc kubenswrapper[4765]: I1210 06:51:44.163331 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9jw8" event={"ID":"a7909736-a9e6-4791-942b-31997fe6d3ee","Type":"ContainerDied","Data":"8f12008bce00b371efa068a390d155a157414319e4353cbc87cc675aa03e1576"} Dec 10 06:51:44 crc kubenswrapper[4765]: I1210 06:51:44.196554 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-q9xg4" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerName="registry-server" probeResult="failure" output=< Dec 10 06:51:44 crc kubenswrapper[4765]: timeout: failed to connect service ":50051" within 1s Dec 10 06:51:44 crc kubenswrapper[4765]: > Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.168732 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cqthw" 
event={"ID":"2a320eac-17e0-42a1-8e4d-3f1c87a72e92","Type":"ContainerStarted","Data":"5cad3eef6bd7b89c1267f8190848b11209cced6aeb8c7df852b429d820315c46"} Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.171870 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9jw8" event={"ID":"a7909736-a9e6-4791-942b-31997fe6d3ee","Type":"ContainerStarted","Data":"ec1583c94152aa971f49d36919d257a2219914d1a11518add38a26ba3dfec47c"} Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.174298 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k44ps" event={"ID":"7b4761b3-3632-4441-897d-4ab2635b7630","Type":"ContainerStarted","Data":"7a7d09951edf10fb94fb603572873743d3c763ce3e8e4ed30b5d3ffcdb6d3682"} Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.176322 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzrqt" event={"ID":"c073bf0a-0806-4af1-9902-a0fe221901be","Type":"ContainerStarted","Data":"c6917d1dc420413aa4334cc8c7d6f31fcacdee520adfc87528b0636c3b446960"} Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.178502 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mklh7" event={"ID":"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55","Type":"ContainerStarted","Data":"f140964dc403aa01df86b472c12d94b87bf7067d83dd9e69910d2594529cb4a8"} Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.183662 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-98pgv" event={"ID":"c5fd4051-3069-4ca7-829d-ad57d3d50c4c","Type":"ContainerStarted","Data":"13e332386d42cd7ea52c282ca9cfdf2bd30efb9d2c85c6784711c810e1425119"} Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.186406 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcct8" event={"ID":"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994","Type":"ContainerStarted","Data":"a021a784b588d700a80362ffabfbec88d1760efafe97ea2989b507b6b3c3f2b1"} Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.216714 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cqthw" podStartSLOduration=4.419235655 podStartE2EDuration="1m21.216695419s" podCreationTimestamp="2025-12-10 06:50:24 +0000 UTC" firstStartedPulling="2025-12-10 06:50:27.013376943 +0000 UTC m=+146.740042259" lastFinishedPulling="2025-12-10 06:51:43.810836707 +0000 UTC m=+223.537502023" observedRunningTime="2025-12-10 06:51:45.193798136 +0000 UTC m=+224.920463462" watchObservedRunningTime="2025-12-10 06:51:45.216695419 +0000 UTC m=+224.943360735" Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.219355 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l9jw8" podStartSLOduration=5.370038095 podStartE2EDuration="1m24.219341284s" podCreationTimestamp="2025-12-10 06:50:21 +0000 UTC" firstStartedPulling="2025-12-10 06:50:25.874851468 +0000 UTC m=+145.601516784" lastFinishedPulling="2025-12-10 06:51:44.724154657 +0000 UTC m=+224.450819973" observedRunningTime="2025-12-10 06:51:45.214806446 +0000 UTC m=+224.941471762" watchObservedRunningTime="2025-12-10 06:51:45.219341284 +0000 UTC m=+224.946006600" Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.244873 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-98pgv" 
Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.244873 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-98pgv" podStartSLOduration=5.3742628 podStartE2EDuration="1m21.24485611s" podCreationTimestamp="2025-12-10 06:50:24 +0000 UTC" firstStartedPulling="2025-12-10 06:50:28.060462433 +0000 UTC m=+147.787127769" lastFinishedPulling="2025-12-10 06:51:43.931055763 +0000 UTC m=+223.657721079" observedRunningTime="2025-12-10 06:51:45.244072058 +0000 UTC m=+224.970737374" watchObservedRunningTime="2025-12-10 06:51:45.24485611 +0000 UTC m=+224.971521426"
Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.268166 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vcct8" podStartSLOduration=5.410461477 podStartE2EDuration="1m21.268149664s" podCreationTimestamp="2025-12-10 06:50:24 +0000 UTC" firstStartedPulling="2025-12-10 06:50:28.044785764 +0000 UTC m=+147.771451080" lastFinishedPulling="2025-12-10 06:51:43.902473951 +0000 UTC m=+223.629139267" observedRunningTime="2025-12-10 06:51:45.262608579 +0000 UTC m=+224.989273895" watchObservedRunningTime="2025-12-10 06:51:45.268149664 +0000 UTC m=+224.994814980"
Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.282902 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dzrqt" podStartSLOduration=4.092229935 podStartE2EDuration="1m24.282886988s" podCreationTimestamp="2025-12-10 06:50:21 +0000 UTC" firstStartedPulling="2025-12-10 06:50:24.427171678 +0000 UTC m=+144.153836994" lastFinishedPulling="2025-12-10 06:51:44.617828721 +0000 UTC m=+224.344494047" observedRunningTime="2025-12-10 06:51:45.281066337 +0000 UTC m=+225.007731653" watchObservedRunningTime="2025-12-10 06:51:45.282886988 +0000 UTC m=+225.009552294"
Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.311285 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mklh7" podStartSLOduration=5.4900581840000005 podStartE2EDuration="1m22.311259815s" podCreationTimestamp="2025-12-10 06:50:23 +0000 UTC" firstStartedPulling="2025-12-10 06:50:27.01397616 +0000 UTC m=+146.740641476" lastFinishedPulling="2025-12-10 06:51:43.835177791 +0000 UTC m=+223.561843107" observedRunningTime="2025-12-10 06:51:45.301245784 +0000 UTC m=+225.027911100" watchObservedRunningTime="2025-12-10 06:51:45.311259815 +0000 UTC m=+225.037925131"
Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.332626 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k44ps" podStartSLOduration=4.513495592 podStartE2EDuration="1m23.332610455s" podCreationTimestamp="2025-12-10 06:50:22 +0000 UTC" firstStartedPulling="2025-12-10 06:50:25.925092397 +0000 UTC m=+145.651757713" lastFinishedPulling="2025-12-10 06:51:44.74420726 +0000 UTC m=+224.470872576" observedRunningTime="2025-12-10 06:51:45.329033864 +0000 UTC m=+225.055699190" watchObservedRunningTime="2025-12-10 06:51:45.332610455 +0000 UTC m=+225.059275771"
Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.363759 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vcct8"
Dec 10 06:51:45 crc kubenswrapper[4765]: I1210 06:51:45.363808 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vcct8"
Dec 10 06:51:46 crc kubenswrapper[4765]: I1210 06:51:46.357305 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ljp6b"]
Dec 10 06:51:46 crc kubenswrapper[4765]: I1210 06:51:46.423707 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vcct8" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerName="registry-server" probeResult="failure" output=<
Dec 10 06:51:46 crc kubenswrapper[4765]: timeout: failed to connect service ":50051" within 1s
Dec 10 06:51:46 crc kubenswrapper[4765]: >
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.510848 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k44ps"]
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.511577 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k44ps" podUID="7b4761b3-3632-4441-897d-4ab2635b7630" containerName="registry-server" containerID="cri-o://7a7d09951edf10fb94fb603572873743d3c763ce3e8e4ed30b5d3ffcdb6d3682" gracePeriod=30
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.523039 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l9jw8"]
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.523322 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l9jw8" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerName="registry-server" containerID="cri-o://ec1583c94152aa971f49d36919d257a2219914d1a11518add38a26ba3dfec47c" gracePeriod=30
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.529140 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dzrqt"]
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.529610 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dzrqt" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" containerName="registry-server" containerID="cri-o://c6917d1dc420413aa4334cc8c7d6f31fcacdee520adfc87528b0636c3b446960" gracePeriod=30
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.553823 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q9xg4"]
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.554261 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q9xg4" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerName="registry-server" containerID="cri-o://33185da7922357082c832fb0ac239898c5cd6f5f50d8c867f56e121266916ebc" gracePeriod=30
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.557361 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bkcmk"]
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.557702 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" podUID="7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" containerName="marketplace-operator" containerID="cri-o://23af2783bcb23ff155bea99d928b904a0770871325a29f8248dbef8e59a05875" gracePeriod=30
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.574545 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cqthw"]
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.575420 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cqthw" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" containerName="registry-server" containerID="cri-o://5cad3eef6bd7b89c1267f8190848b11209cced6aeb8c7df852b429d820315c46" gracePeriod=30
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.577463 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-sxwbc"]
Dec 10 06:51:47 crc kubenswrapper[4765]: E1210 06:51:47.577767 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12e10e8d-19ff-459c-8215-2aeb1a493e4e" containerName="pruner"
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.577794 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="12e10e8d-19ff-459c-8215-2aeb1a493e4e" containerName="pruner"
Dec 10 06:51:47 crc kubenswrapper[4765]: E1210 06:51:47.577826 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81fb3942-2175-4415-83ab-1ecc3ec73745" containerName="pruner"
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.577836 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="81fb3942-2175-4415-83ab-1ecc3ec73745" containerName="pruner"
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.577965 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="81fb3942-2175-4415-83ab-1ecc3ec73745" containerName="pruner"
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.577991 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="12e10e8d-19ff-459c-8215-2aeb1a493e4e" containerName="pruner"
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.578493 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc"
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.582677 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mklh7"]
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.582988 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mklh7" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerName="registry-server" containerID="cri-o://f140964dc403aa01df86b472c12d94b87bf7067d83dd9e69910d2594529cb4a8" gracePeriod=30
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.594347 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e5f2035f-97a3-4b63-ac9d-42ed0b201eec-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-sxwbc\" (UID: \"e5f2035f-97a3-4b63-ac9d-42ed0b201eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc"
Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.594746 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e5f2035f-97a3-4b63-ac9d-42ed0b201eec-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-sxwbc\" (UID: \"e5f2035f-97a3-4b63-ac9d-42ed0b201eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc"
pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.595870 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-98pgv"] Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.596104 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-98pgv" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerName="registry-server" containerID="cri-o://13e332386d42cd7ea52c282ca9cfdf2bd30efb9d2c85c6784711c810e1425119" gracePeriod=30 Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.603937 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-sxwbc"] Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.608277 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vcct8"] Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.608607 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vcct8" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerName="registry-server" containerID="cri-o://a021a784b588d700a80362ffabfbec88d1760efafe97ea2989b507b6b3c3f2b1" gracePeriod=30 Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.695876 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mhdk\" (UniqueName: \"kubernetes.io/projected/e5f2035f-97a3-4b63-ac9d-42ed0b201eec-kube-api-access-7mhdk\") pod \"marketplace-operator-79b997595-sxwbc\" (UID: \"e5f2035f-97a3-4b63-ac9d-42ed0b201eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.695946 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e5f2035f-97a3-4b63-ac9d-42ed0b201eec-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-sxwbc\" (UID: \"e5f2035f-97a3-4b63-ac9d-42ed0b201eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.695963 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e5f2035f-97a3-4b63-ac9d-42ed0b201eec-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-sxwbc\" (UID: \"e5f2035f-97a3-4b63-ac9d-42ed0b201eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.697555 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e5f2035f-97a3-4b63-ac9d-42ed0b201eec-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-sxwbc\" (UID: \"e5f2035f-97a3-4b63-ac9d-42ed0b201eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.705322 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e5f2035f-97a3-4b63-ac9d-42ed0b201eec-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-sxwbc\" (UID: \"e5f2035f-97a3-4b63-ac9d-42ed0b201eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.712531 
4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mhdk\" (UniqueName: \"kubernetes.io/projected/e5f2035f-97a3-4b63-ac9d-42ed0b201eec-kube-api-access-7mhdk\") pod \"marketplace-operator-79b997595-sxwbc\" (UID: \"e5f2035f-97a3-4b63-ac9d-42ed0b201eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" Dec 10 06:51:47 crc kubenswrapper[4765]: I1210 06:51:47.918576 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" Dec 10 06:51:48 crc kubenswrapper[4765]: I1210 06:51:48.357817 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-sxwbc"] Dec 10 06:51:48 crc kubenswrapper[4765]: I1210 06:51:48.922550 4765 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bkcmk container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 10 06:51:48 crc kubenswrapper[4765]: I1210 06:51:48.922650 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" podUID="7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 10 06:51:49 crc kubenswrapper[4765]: I1210 06:51:49.207177 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" event={"ID":"e5f2035f-97a3-4b63-ac9d-42ed0b201eec","Type":"ContainerStarted","Data":"d7a2a1c713b2758a04f3a11e7fadd368cc316e799581cf3318e14601c35ae1f5"} Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.215264 4765 generic.go:334] "Generic (PLEG): container finished" podID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerID="33185da7922357082c832fb0ac239898c5cd6f5f50d8c867f56e121266916ebc" exitCode=0 Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.215348 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9xg4" event={"ID":"2f0fa6f1-2039-4003-88b8-d0fccce70b29","Type":"ContainerDied","Data":"33185da7922357082c832fb0ac239898c5cd6f5f50d8c867f56e121266916ebc"} Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.217671 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" event={"ID":"e5f2035f-97a3-4b63-ac9d-42ed0b201eec","Type":"ContainerStarted","Data":"7e6c8682cb53952aea0bc8d01745aba8ba2a396728dcb5e3f73de34a12e46c7c"} Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.219549 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vcct8_4f7e189b-f64e-4a06-b5ac-ec94f0f3c994/registry-server/0.log" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.220233 4765 generic.go:334] "Generic (PLEG): container finished" podID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerID="a021a784b588d700a80362ffabfbec88d1760efafe97ea2989b507b6b3c3f2b1" exitCode=1 Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.220322 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcct8" event={"ID":"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994","Type":"ContainerDied","Data":"a021a784b588d700a80362ffabfbec88d1760efafe97ea2989b507b6b3c3f2b1"} 
Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.221984 4765 generic.go:334] "Generic (PLEG): container finished" podID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" containerID="5cad3eef6bd7b89c1267f8190848b11209cced6aeb8c7df852b429d820315c46" exitCode=0 Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.222053 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cqthw" event={"ID":"2a320eac-17e0-42a1-8e4d-3f1c87a72e92","Type":"ContainerDied","Data":"5cad3eef6bd7b89c1267f8190848b11209cced6aeb8c7df852b429d820315c46"} Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.224142 4765 generic.go:334] "Generic (PLEG): container finished" podID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerID="ec1583c94152aa971f49d36919d257a2219914d1a11518add38a26ba3dfec47c" exitCode=0 Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.224222 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9jw8" event={"ID":"a7909736-a9e6-4791-942b-31997fe6d3ee","Type":"ContainerDied","Data":"ec1583c94152aa971f49d36919d257a2219914d1a11518add38a26ba3dfec47c"} Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.225903 4765 generic.go:334] "Generic (PLEG): container finished" podID="7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" containerID="23af2783bcb23ff155bea99d928b904a0770871325a29f8248dbef8e59a05875" exitCode=0 Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.225978 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" event={"ID":"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76","Type":"ContainerDied","Data":"23af2783bcb23ff155bea99d928b904a0770871325a29f8248dbef8e59a05875"} Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.229197 4765 generic.go:334] "Generic (PLEG): container finished" podID="7b4761b3-3632-4441-897d-4ab2635b7630" containerID="7a7d09951edf10fb94fb603572873743d3c763ce3e8e4ed30b5d3ffcdb6d3682" exitCode=0 Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.229278 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k44ps" event={"ID":"7b4761b3-3632-4441-897d-4ab2635b7630","Type":"ContainerDied","Data":"7a7d09951edf10fb94fb603572873743d3c763ce3e8e4ed30b5d3ffcdb6d3682"} Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.231381 4765 generic.go:334] "Generic (PLEG): container finished" podID="c073bf0a-0806-4af1-9902-a0fe221901be" containerID="c6917d1dc420413aa4334cc8c7d6f31fcacdee520adfc87528b0636c3b446960" exitCode=0 Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.231440 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzrqt" event={"ID":"c073bf0a-0806-4af1-9902-a0fe221901be","Type":"ContainerDied","Data":"c6917d1dc420413aa4334cc8c7d6f31fcacdee520adfc87528b0636c3b446960"} Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.233308 4765 generic.go:334] "Generic (PLEG): container finished" podID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerID="f140964dc403aa01df86b472c12d94b87bf7067d83dd9e69910d2594529cb4a8" exitCode=0 Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.233371 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mklh7" event={"ID":"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55","Type":"ContainerDied","Data":"f140964dc403aa01df86b472c12d94b87bf7067d83dd9e69910d2594529cb4a8"} Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.234797 4765 log.go:25] 
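Above, the old marketplace-operator pod (marketplace-operator-79b997595-bkcmk) fails its readiness probe on 10.217.0.35:8080/healthz while its replacement (marketplace-operator-79b997595-sxwbc) starts its containers. A small sketch that waits for a replacement pod's Ready condition, under the same Python-client assumptions as the earlier sketches:

    import time
    from kubernetes import client, config

    config.load_kube_config()
    v1 = client.CoreV1Api()

    def wait_ready(name, namespace, timeout=120):
        # Poll the pod's Ready condition until it turns True or we time out.
        deadline = time.time() + timeout
        while time.time() < deadline:
            pod = v1.read_namespaced_pod(name, namespace)
            for cond in pod.status.conditions or []:
                if cond.type == "Ready" and cond.status == "True":
                    return True
            time.sleep(2)
        return False

    wait_ready("marketplace-operator-79b997595-sxwbc", "openshift-marketplace")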
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-98pgv_c5fd4051-3069-4ca7-829d-ad57d3d50c4c/registry-server/0.log" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.235642 4765 generic.go:334] "Generic (PLEG): container finished" podID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerID="13e332386d42cd7ea52c282ca9cfdf2bd30efb9d2c85c6784711c810e1425119" exitCode=1 Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.235672 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-98pgv" event={"ID":"c5fd4051-3069-4ca7-829d-ad57d3d50c4c","Type":"ContainerDied","Data":"13e332386d42cd7ea52c282ca9cfdf2bd30efb9d2c85c6784711c810e1425119"} Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.695756 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dzrqt" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.740675 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-utilities\") pod \"c073bf0a-0806-4af1-9902-a0fe221901be\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.740821 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8xss\" (UniqueName: \"kubernetes.io/projected/c073bf0a-0806-4af1-9902-a0fe221901be-kube-api-access-n8xss\") pod \"c073bf0a-0806-4af1-9902-a0fe221901be\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.740921 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-catalog-content\") pod \"c073bf0a-0806-4af1-9902-a0fe221901be\" (UID: \"c073bf0a-0806-4af1-9902-a0fe221901be\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.742937 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-utilities" (OuterVolumeSpecName: "utilities") pod "c073bf0a-0806-4af1-9902-a0fe221901be" (UID: "c073bf0a-0806-4af1-9902-a0fe221901be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.752299 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c073bf0a-0806-4af1-9902-a0fe221901be-kube-api-access-n8xss" (OuterVolumeSpecName: "kube-api-access-n8xss") pod "c073bf0a-0806-4af1-9902-a0fe221901be" (UID: "c073bf0a-0806-4af1-9902-a0fe221901be"). InnerVolumeSpecName "kube-api-access-n8xss". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.817349 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c073bf0a-0806-4af1-9902-a0fe221901be" (UID: "c073bf0a-0806-4af1-9902-a0fe221901be"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.842924 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.842970 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c073bf0a-0806-4af1-9902-a0fe221901be-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.842984 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8xss\" (UniqueName: \"kubernetes.io/projected/c073bf0a-0806-4af1-9902-a0fe221901be-kube-api-access-n8xss\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.881984 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l9jw8" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.887935 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cqthw" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.893169 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mklh7" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.900676 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.905342 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q9xg4" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.915388 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vcct8_4f7e189b-f64e-4a06-b5ac-ec94f0f3c994/registry-server/0.log" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.916578 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vcct8" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.922732 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k44ps" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.925699 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-98pgv_c5fd4051-3069-4ca7-829d-ad57d3d50c4c/registry-server/0.log" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.926523 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-98pgv" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944214 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-catalog-content\") pod \"a7909736-a9e6-4791-942b-31997fe6d3ee\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944265 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvljv\" (UniqueName: \"kubernetes.io/projected/a7909736-a9e6-4791-942b-31997fe6d3ee-kube-api-access-gvljv\") pod \"a7909736-a9e6-4791-942b-31997fe6d3ee\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944291 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6w68\" (UniqueName: \"kubernetes.io/projected/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-kube-api-access-t6w68\") pod \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944313 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rvxf\" (UniqueName: \"kubernetes.io/projected/7b4761b3-3632-4441-897d-4ab2635b7630-kube-api-access-6rvxf\") pod \"7b4761b3-3632-4441-897d-4ab2635b7630\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944335 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-utilities\") pod \"7b4761b3-3632-4441-897d-4ab2635b7630\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944354 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-utilities\") pod \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944385 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-operator-metrics\") pod \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944409 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-catalog-content\") pod \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944440 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-utilities\") pod \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944461 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
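The reconciler_common/operation_generator entries around this point are the kubelet's volume manager tearing down the emptyDir and projected volumes of the deleted catalog pods. A sketch of observing the same churn from the API side with a watch on pod events, under the same client assumptions as the earlier sketches:

    from kubernetes import client, config, watch

    config.load_kube_config()
    v1 = client.CoreV1Api()

    # Stream pod events for the namespace; deletions show up as DELETED.
    w = watch.Watch()
    for event in w.stream(v1.list_namespaced_pod, namespace="openshift-marketplace", timeout_seconds=60):
        if event["type"] == "DELETED":
            print("deleted:", event["object"].metadata.name)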
\"kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-catalog-content\") pod \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\" (UID: \"2a320eac-17e0-42a1-8e4d-3f1c87a72e92\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944483 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-catalog-content\") pod \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944514 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrtnk\" (UniqueName: \"kubernetes.io/projected/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-kube-api-access-hrtnk\") pod \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944537 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-catalog-content\") pod \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944563 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gfzb\" (UniqueName: \"kubernetes.io/projected/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-kube-api-access-9gfzb\") pod \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944585 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-trusted-ca\") pod \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\" (UID: \"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944608 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7spmh\" (UniqueName: \"kubernetes.io/projected/2f0fa6f1-2039-4003-88b8-d0fccce70b29-kube-api-access-7spmh\") pod \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944631 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-utilities\") pod \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944653 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-catalog-content\") pod \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944687 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xx5dg\" (UniqueName: \"kubernetes.io/projected/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-kube-api-access-xx5dg\") pod \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\" (UID: \"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944714 4765 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-utilities\") pod \"a7909736-a9e6-4791-942b-31997fe6d3ee\" (UID: \"a7909736-a9e6-4791-942b-31997fe6d3ee\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944739 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-catalog-content\") pod \"7b4761b3-3632-4441-897d-4ab2635b7630\" (UID: \"7b4761b3-3632-4441-897d-4ab2635b7630\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944764 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-utilities\") pod \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\" (UID: \"c5fd4051-3069-4ca7-829d-ad57d3d50c4c\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944792 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkgcm\" (UniqueName: \"kubernetes.io/projected/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-kube-api-access-tkgcm\") pod \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\" (UID: \"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.944813 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-utilities\") pod \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\" (UID: \"2f0fa6f1-2039-4003-88b8-d0fccce70b29\") " Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.947133 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-utilities" (OuterVolumeSpecName: "utilities") pod "2f0fa6f1-2039-4003-88b8-d0fccce70b29" (UID: "2f0fa6f1-2039-4003-88b8-d0fccce70b29"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.952471 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-utilities" (OuterVolumeSpecName: "utilities") pod "7b4761b3-3632-4441-897d-4ab2635b7630" (UID: "7b4761b3-3632-4441-897d-4ab2635b7630"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.953215 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" (UID: "7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.955010 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-utilities" (OuterVolumeSpecName: "utilities") pod "c5fd4051-3069-4ca7-829d-ad57d3d50c4c" (UID: "c5fd4051-3069-4ca7-829d-ad57d3d50c4c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.956106 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-utilities" (OuterVolumeSpecName: "utilities") pod "2a320eac-17e0-42a1-8e4d-3f1c87a72e92" (UID: "2a320eac-17e0-42a1-8e4d-3f1c87a72e92"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.956858 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-kube-api-access-hrtnk" (OuterVolumeSpecName: "kube-api-access-hrtnk") pod "c5fd4051-3069-4ca7-829d-ad57d3d50c4c" (UID: "c5fd4051-3069-4ca7-829d-ad57d3d50c4c"). InnerVolumeSpecName "kube-api-access-hrtnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.956918 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-kube-api-access-t6w68" (OuterVolumeSpecName: "kube-api-access-t6w68") pod "2a320eac-17e0-42a1-8e4d-3f1c87a72e92" (UID: "2a320eac-17e0-42a1-8e4d-3f1c87a72e92"). InnerVolumeSpecName "kube-api-access-t6w68". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.956938 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b4761b3-3632-4441-897d-4ab2635b7630-kube-api-access-6rvxf" (OuterVolumeSpecName: "kube-api-access-6rvxf") pod "7b4761b3-3632-4441-897d-4ab2635b7630" (UID: "7b4761b3-3632-4441-897d-4ab2635b7630"). InnerVolumeSpecName "kube-api-access-6rvxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.957056 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f0fa6f1-2039-4003-88b8-d0fccce70b29-kube-api-access-7spmh" (OuterVolumeSpecName: "kube-api-access-7spmh") pod "2f0fa6f1-2039-4003-88b8-d0fccce70b29" (UID: "2f0fa6f1-2039-4003-88b8-d0fccce70b29"). InnerVolumeSpecName "kube-api-access-7spmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.957540 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7909736-a9e6-4791-942b-31997fe6d3ee-kube-api-access-gvljv" (OuterVolumeSpecName: "kube-api-access-gvljv") pod "a7909736-a9e6-4791-942b-31997fe6d3ee" (UID: "a7909736-a9e6-4791-942b-31997fe6d3ee"). InnerVolumeSpecName "kube-api-access-gvljv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.958991 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-utilities" (OuterVolumeSpecName: "utilities") pod "4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" (UID: "4f7e189b-f64e-4a06-b5ac-ec94f0f3c994"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.960224 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-kube-api-access-tkgcm" (OuterVolumeSpecName: "kube-api-access-tkgcm") pod "1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" (UID: "1d0a35e0-faa1-4efe-9861-75f4d1fa7f55"). InnerVolumeSpecName "kube-api-access-tkgcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.962313 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-utilities" (OuterVolumeSpecName: "utilities") pod "a7909736-a9e6-4791-942b-31997fe6d3ee" (UID: "a7909736-a9e6-4791-942b-31997fe6d3ee"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.962906 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-kube-api-access-xx5dg" (OuterVolumeSpecName: "kube-api-access-xx5dg") pod "4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" (UID: "4f7e189b-f64e-4a06-b5ac-ec94f0f3c994"). InnerVolumeSpecName "kube-api-access-xx5dg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.964762 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-utilities" (OuterVolumeSpecName: "utilities") pod "1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" (UID: "1d0a35e0-faa1-4efe-9861-75f4d1fa7f55"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.971254 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-kube-api-access-9gfzb" (OuterVolumeSpecName: "kube-api-access-9gfzb") pod "7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" (UID: "7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76"). InnerVolumeSpecName "kube-api-access-9gfzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.990485 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" (UID: "1d0a35e0-faa1-4efe-9861-75f4d1fa7f55"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:50 crc kubenswrapper[4765]: I1210 06:51:50.992558 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" (UID: "7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.011039 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a320eac-17e0-42a1-8e4d-3f1c87a72e92" (UID: "2a320eac-17e0-42a1-8e4d-3f1c87a72e92"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046197 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046229 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046243 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046252 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrtnk\" (UniqueName: \"kubernetes.io/projected/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-kube-api-access-hrtnk\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046262 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gfzb\" (UniqueName: \"kubernetes.io/projected/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-kube-api-access-9gfzb\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046271 4765 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046282 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7spmh\" (UniqueName: \"kubernetes.io/projected/2f0fa6f1-2039-4003-88b8-d0fccce70b29-kube-api-access-7spmh\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046291 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046299 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xx5dg\" (UniqueName: \"kubernetes.io/projected/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-kube-api-access-xx5dg\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046306 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046317 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046325 4765 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkgcm\" (UniqueName: \"kubernetes.io/projected/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-kube-api-access-tkgcm\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046334 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046342 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvljv\" (UniqueName: \"kubernetes.io/projected/a7909736-a9e6-4791-942b-31997fe6d3ee-kube-api-access-gvljv\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046354 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6w68\" (UniqueName: \"kubernetes.io/projected/2a320eac-17e0-42a1-8e4d-3f1c87a72e92-kube-api-access-t6w68\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046362 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rvxf\" (UniqueName: \"kubernetes.io/projected/7b4761b3-3632-4441-897d-4ab2635b7630-kube-api-access-6rvxf\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046371 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046381 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.046389 4765 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.052222 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a7909736-a9e6-4791-942b-31997fe6d3ee" (UID: "a7909736-a9e6-4791-942b-31997fe6d3ee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.055726 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b4761b3-3632-4441-897d-4ab2635b7630" (UID: "7b4761b3-3632-4441-897d-4ab2635b7630"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.059679 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f0fa6f1-2039-4003-88b8-d0fccce70b29" (UID: "2f0fa6f1-2039-4003-88b8-d0fccce70b29"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.145747 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5fd4051-3069-4ca7-829d-ad57d3d50c4c" (UID: "c5fd4051-3069-4ca7-829d-ad57d3d50c4c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.147728 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7909736-a9e6-4791-942b-31997fe6d3ee-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.147755 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f0fa6f1-2039-4003-88b8-d0fccce70b29-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.147764 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5fd4051-3069-4ca7-829d-ad57d3d50c4c-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.147773 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b4761b3-3632-4441-897d-4ab2635b7630-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.162797 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" (UID: "4f7e189b-f64e-4a06-b5ac-ec94f0f3c994"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.242491 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9xg4" event={"ID":"2f0fa6f1-2039-4003-88b8-d0fccce70b29","Type":"ContainerDied","Data":"4b8cd164b032fa5650b2137f665f26db16f803f003a42db8c3098e5e9c365a7f"}
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.242550 4765 scope.go:117] "RemoveContainer" containerID="33185da7922357082c832fb0ac239898c5cd6f5f50d8c867f56e121266916ebc"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.242517 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q9xg4"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.245435 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k44ps"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.245467 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k44ps" event={"ID":"7b4761b3-3632-4441-897d-4ab2635b7630","Type":"ContainerDied","Data":"e6f6af63def80df10d8a0eb3295a61bf274a25f66c2b50e2a08e9e403fecc8db"}
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.248496 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzrqt" event={"ID":"c073bf0a-0806-4af1-9902-a0fe221901be","Type":"ContainerDied","Data":"2fecd9b0c29f22b3ea3c4ac932734c41d2ce4f855da1f85d66d2fc37173232c5"}
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.248637 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.248665 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dzrqt"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.251461 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk" event={"ID":"7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76","Type":"ContainerDied","Data":"16badef4290b83ab838080287dd0114a85ddf260647c96f2d33d29c603862e91"}
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.251551 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bkcmk"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.257951 4765 scope.go:117] "RemoveContainer" containerID="6c5cf3f5ee38ba12376accdedeb395de9733bae1a9a6defe6c20d9cb199c0f45"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.258770 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vcct8_4f7e189b-f64e-4a06-b5ac-ec94f0f3c994/registry-server/0.log"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.261008 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vcct8" event={"ID":"4f7e189b-f64e-4a06-b5ac-ec94f0f3c994","Type":"ContainerDied","Data":"39ff5c7158924561f24cbb6fc8c858771320408d5c937ae902eaa8910fa73e0e"}
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.261037 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vcct8"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.263944 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cqthw" event={"ID":"2a320eac-17e0-42a1-8e4d-3f1c87a72e92","Type":"ContainerDied","Data":"8f07080714043f9b75c0886fac0d25dc79f17bcbb5180b601f2cbd25a04a4cb0"}
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.264030 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cqthw"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.268564 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9jw8" event={"ID":"a7909736-a9e6-4791-942b-31997fe6d3ee","Type":"ContainerDied","Data":"1cdf55962c6900ead26b3954f0ceac8318e636f4299ea1fd7ba81692df1594b6"}
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.268803 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l9jw8"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.277967 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mklh7" event={"ID":"1d0a35e0-faa1-4efe-9861-75f4d1fa7f55","Type":"ContainerDied","Data":"ec8d4b9245abe845851ff6082af6bb369c6b11cb884d254afb23c6e43dd3b71c"}
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.278074 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mklh7"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.278909 4765 scope.go:117] "RemoveContainer" containerID="4dc1e9d9c5f42658598a387e8485de5b11251fc44a437e692ddaae30a46e93f5"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.280739 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q9xg4"]
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.286944 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q9xg4"]
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.287423 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-98pgv_c5fd4051-3069-4ca7-829d-ad57d3d50c4c/registry-server/0.log"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.288350 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-98pgv" event={"ID":"c5fd4051-3069-4ca7-829d-ad57d3d50c4c","Type":"ContainerDied","Data":"44c2c12afdd78c5b60915bbf4f47b43a7bae89ae6f596a292b499c0495074d5a"}
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.288539 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.291207 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc"
Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.291514 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-98pgv" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.303490 4765 scope.go:117] "RemoveContainer" containerID="7a7d09951edf10fb94fb603572873743d3c763ce3e8e4ed30b5d3ffcdb6d3682" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.308223 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bkcmk"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.313648 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bkcmk"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.317517 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dzrqt"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.318914 4765 scope.go:117] "RemoveContainer" containerID="36eb4ed04d06c558490818ab8829f39650f70921361da031541aec7dd70d62b2" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.321206 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dzrqt"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.327905 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vcct8"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.330898 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vcct8"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.342002 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mklh7"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.346196 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mklh7"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.349699 4765 scope.go:117] "RemoveContainer" containerID="3a8c2fb53ede4e46a9ea869a26ba912719d8b080e6ef9dc7c10fb16b9ea3d78d" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.355213 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k44ps"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.362278 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k44ps"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.367300 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cqthw"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.369716 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cqthw"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.374906 4765 scope.go:117] "RemoveContainer" containerID="c6917d1dc420413aa4334cc8c7d6f31fcacdee520adfc87528b0636c3b446960" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.384629 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-sxwbc" podStartSLOduration=4.384607927 podStartE2EDuration="4.384607927s" podCreationTimestamp="2025-12-10 06:51:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:51:51.378599819 +0000 UTC m=+231.105265135" watchObservedRunningTime="2025-12-10 06:51:51.384607927 +0000 UTC m=+231.111273243" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.393484 4765 scope.go:117] 
"RemoveContainer" containerID="2cc19ae58a4f36e671e12b3b76273926b12bcb03a61a159a75f1ee914161f004" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.400110 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l9jw8"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.409344 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l9jw8"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.411962 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-98pgv"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.414424 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-98pgv"] Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.418208 4765 scope.go:117] "RemoveContainer" containerID="8e8fdb26ab0e1d21588cf1c79f7bc39952df3bbd457a0da5889c84d7fde28480" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.431018 4765 scope.go:117] "RemoveContainer" containerID="23af2783bcb23ff155bea99d928b904a0770871325a29f8248dbef8e59a05875" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.445563 4765 scope.go:117] "RemoveContainer" containerID="a021a784b588d700a80362ffabfbec88d1760efafe97ea2989b507b6b3c3f2b1" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.464339 4765 scope.go:117] "RemoveContainer" containerID="9607fc725c18ff05f81fbdc6cd28582549c7ca82ab43e4ad7193dbe45fceee93" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.481366 4765 scope.go:117] "RemoveContainer" containerID="7c56056fdc1bba00b03e97dd39668e9c9f4f3f60a7a73ae09223432ff41471b1" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.508056 4765 scope.go:117] "RemoveContainer" containerID="5cad3eef6bd7b89c1267f8190848b11209cced6aeb8c7df852b429d820315c46" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.522208 4765 scope.go:117] "RemoveContainer" containerID="91489819da3c4cd27a457ba82a733f6c7912a44f9193ad6e673aef29a766f552" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.534734 4765 scope.go:117] "RemoveContainer" containerID="933300b6612795603cbfd9a376c5db2c937426ed3564aa145674dedfb58ebe88" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.548278 4765 scope.go:117] "RemoveContainer" containerID="ec1583c94152aa971f49d36919d257a2219914d1a11518add38a26ba3dfec47c" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.565996 4765 scope.go:117] "RemoveContainer" containerID="8f12008bce00b371efa068a390d155a157414319e4353cbc87cc675aa03e1576" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.578528 4765 scope.go:117] "RemoveContainer" containerID="db2d6231b064740e710375bed0966611867fbcc45c43b2ef15b6b357cdbc1ba6" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.589253 4765 scope.go:117] "RemoveContainer" containerID="f140964dc403aa01df86b472c12d94b87bf7067d83dd9e69910d2594529cb4a8" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.601072 4765 scope.go:117] "RemoveContainer" containerID="34a291d80ce76d65e373ab44e4c3ae603e5eec51de0da16b1321923a76fd36da" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.613036 4765 scope.go:117] "RemoveContainer" containerID="90d338389c09afdf6d96d3d9009493536090262178181702fe07f14859495957" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.624368 4765 scope.go:117] "RemoveContainer" containerID="13e332386d42cd7ea52c282ca9cfdf2bd30efb9d2c85c6784711c810e1425119" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.640048 4765 scope.go:117] 
"RemoveContainer" containerID="4086ccb48734c6ed8c36be50924b59c9cb23ff942c2edce231a8f0602323f841" Dec 10 06:51:51 crc kubenswrapper[4765]: I1210 06:51:51.659292 4765 scope.go:117] "RemoveContainer" containerID="93bae6ae77af242f8e388001813976b5af5d11e88b7997698b4bc955123d7de0" Dec 10 06:51:52 crc kubenswrapper[4765]: I1210 06:51:52.597416 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" path="/var/lib/kubelet/pods/1d0a35e0-faa1-4efe-9861-75f4d1fa7f55/volumes" Dec 10 06:51:52 crc kubenswrapper[4765]: I1210 06:51:52.598159 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" path="/var/lib/kubelet/pods/2a320eac-17e0-42a1-8e4d-3f1c87a72e92/volumes" Dec 10 06:51:52 crc kubenswrapper[4765]: I1210 06:51:52.599030 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" path="/var/lib/kubelet/pods/2f0fa6f1-2039-4003-88b8-d0fccce70b29/volumes" Dec 10 06:51:52 crc kubenswrapper[4765]: I1210 06:51:52.600184 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" path="/var/lib/kubelet/pods/4f7e189b-f64e-4a06-b5ac-ec94f0f3c994/volumes" Dec 10 06:51:52 crc kubenswrapper[4765]: I1210 06:51:52.600755 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b4761b3-3632-4441-897d-4ab2635b7630" path="/var/lib/kubelet/pods/7b4761b3-3632-4441-897d-4ab2635b7630/volumes" Dec 10 06:51:52 crc kubenswrapper[4765]: I1210 06:51:52.601910 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" path="/var/lib/kubelet/pods/7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76/volumes" Dec 10 06:51:52 crc kubenswrapper[4765]: I1210 06:51:52.602359 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee" path="/var/lib/kubelet/pods/a7909736-a9e6-4791-942b-31997fe6d3ee/volumes" Dec 10 06:51:52 crc kubenswrapper[4765]: I1210 06:51:52.602900 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" path="/var/lib/kubelet/pods/c073bf0a-0806-4af1-9902-a0fe221901be/volumes" Dec 10 06:51:52 crc kubenswrapper[4765]: I1210 06:51:52.603814 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" path="/var/lib/kubelet/pods/c5fd4051-3069-4ca7-829d-ad57d3d50c4c/volumes" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092572 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-98gbr"] Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092801 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092815 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092831 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092838 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092848 
4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092856 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092867 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092874 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092884 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092891 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092902 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092909 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092919 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092926 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092934 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092941 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092952 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092959 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092969 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092977 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.092985 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.092992 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerName="registry-server" Dec 10 06:51:53 crc 
kubenswrapper[4765]: E1210 06:51:53.093003 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093010 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093018 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093025 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093035 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" containerName="marketplace-operator" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093042 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" containerName="marketplace-operator" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093052 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093059 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093069 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093075 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093112 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4761b3-3632-4441-897d-4ab2635b7630" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093121 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4761b3-3632-4441-897d-4ab2635b7630" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093130 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093137 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093145 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093153 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093163 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093170 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" 
containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093179 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093186 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerName="extract-content" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093196 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4761b3-3632-4441-897d-4ab2635b7630" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093204 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4761b3-3632-4441-897d-4ab2635b7630" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093214 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093222 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093229 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b4761b3-3632-4441-897d-4ab2635b7630" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093238 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b4761b3-3632-4441-897d-4ab2635b7630" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.093248 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093255 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerName="extract-utilities" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093361 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7e189b-f64e-4a06-b5ac-ec94f0f3c994" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093378 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d0a35e0-faa1-4efe-9861-75f4d1fa7f55" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093388 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c073bf0a-0806-4af1-9902-a0fe221901be" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093396 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7909736-a9e6-4791-942b-31997fe6d3ee" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093409 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a320eac-17e0-42a1-8e4d-3f1c87a72e92" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093420 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5fd4051-3069-4ca7-829d-ad57d3d50c4c" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093429 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f0fa6f1-2039-4003-88b8-d0fccce70b29" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093437 4765 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="7d3c108c-57f5-4c5e-9bcd-eaff0a53cf76" containerName="marketplace-operator" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.093447 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b4761b3-3632-4441-897d-4ab2635b7630" containerName="registry-server" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.099767 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.106508 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.109615 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-98gbr"] Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.176299 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsjpj\" (UniqueName: \"kubernetes.io/projected/ea399b94-8696-405b-956e-a807aca44b1c-kube-api-access-fsjpj\") pod \"redhat-marketplace-98gbr\" (UID: \"ea399b94-8696-405b-956e-a807aca44b1c\") " pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.176366 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea399b94-8696-405b-956e-a807aca44b1c-catalog-content\") pod \"redhat-marketplace-98gbr\" (UID: \"ea399b94-8696-405b-956e-a807aca44b1c\") " pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.176413 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea399b94-8696-405b-956e-a807aca44b1c-utilities\") pod \"redhat-marketplace-98gbr\" (UID: \"ea399b94-8696-405b-956e-a807aca44b1c\") " pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.277165 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea399b94-8696-405b-956e-a807aca44b1c-utilities\") pod \"redhat-marketplace-98gbr\" (UID: \"ea399b94-8696-405b-956e-a807aca44b1c\") " pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.277270 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsjpj\" (UniqueName: \"kubernetes.io/projected/ea399b94-8696-405b-956e-a807aca44b1c-kube-api-access-fsjpj\") pod \"redhat-marketplace-98gbr\" (UID: \"ea399b94-8696-405b-956e-a807aca44b1c\") " pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.277290 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea399b94-8696-405b-956e-a807aca44b1c-catalog-content\") pod \"redhat-marketplace-98gbr\" (UID: \"ea399b94-8696-405b-956e-a807aca44b1c\") " pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.277892 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea399b94-8696-405b-956e-a807aca44b1c-utilities\") pod 
\"redhat-marketplace-98gbr\" (UID: \"ea399b94-8696-405b-956e-a807aca44b1c\") " pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.277920 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea399b94-8696-405b-956e-a807aca44b1c-catalog-content\") pod \"redhat-marketplace-98gbr\" (UID: \"ea399b94-8696-405b-956e-a807aca44b1c\") " pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.309295 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsjpj\" (UniqueName: \"kubernetes.io/projected/ea399b94-8696-405b-956e-a807aca44b1c-kube-api-access-fsjpj\") pod \"redhat-marketplace-98gbr\" (UID: \"ea399b94-8696-405b-956e-a807aca44b1c\") " pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.313630 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-swxql"] Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.314773 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-swxql" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.317554 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.320985 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-swxql"] Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.378213 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44rb7\" (UniqueName: \"kubernetes.io/projected/c0877bed-7b01-43a1-8704-eba99bb1e38d-kube-api-access-44rb7\") pod \"redhat-operators-swxql\" (UID: \"c0877bed-7b01-43a1-8704-eba99bb1e38d\") " pod="openshift-marketplace/redhat-operators-swxql" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.378290 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0877bed-7b01-43a1-8704-eba99bb1e38d-utilities\") pod \"redhat-operators-swxql\" (UID: \"c0877bed-7b01-43a1-8704-eba99bb1e38d\") " pod="openshift-marketplace/redhat-operators-swxql" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.378319 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0877bed-7b01-43a1-8704-eba99bb1e38d-catalog-content\") pod \"redhat-operators-swxql\" (UID: \"c0877bed-7b01-43a1-8704-eba99bb1e38d\") " pod="openshift-marketplace/redhat-operators-swxql" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.417989 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-98gbr" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.484031 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44rb7\" (UniqueName: \"kubernetes.io/projected/c0877bed-7b01-43a1-8704-eba99bb1e38d-kube-api-access-44rb7\") pod \"redhat-operators-swxql\" (UID: \"c0877bed-7b01-43a1-8704-eba99bb1e38d\") " pod="openshift-marketplace/redhat-operators-swxql" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.484115 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0877bed-7b01-43a1-8704-eba99bb1e38d-utilities\") pod \"redhat-operators-swxql\" (UID: \"c0877bed-7b01-43a1-8704-eba99bb1e38d\") " pod="openshift-marketplace/redhat-operators-swxql" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.484162 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0877bed-7b01-43a1-8704-eba99bb1e38d-catalog-content\") pod \"redhat-operators-swxql\" (UID: \"c0877bed-7b01-43a1-8704-eba99bb1e38d\") " pod="openshift-marketplace/redhat-operators-swxql" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.484702 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0877bed-7b01-43a1-8704-eba99bb1e38d-utilities\") pod \"redhat-operators-swxql\" (UID: \"c0877bed-7b01-43a1-8704-eba99bb1e38d\") " pod="openshift-marketplace/redhat-operators-swxql" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.484729 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0877bed-7b01-43a1-8704-eba99bb1e38d-catalog-content\") pod \"redhat-operators-swxql\" (UID: \"c0877bed-7b01-43a1-8704-eba99bb1e38d\") " pod="openshift-marketplace/redhat-operators-swxql" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.498388 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44rb7\" (UniqueName: \"kubernetes.io/projected/c0877bed-7b01-43a1-8704-eba99bb1e38d-kube-api-access-44rb7\") pod \"redhat-operators-swxql\" (UID: \"c0877bed-7b01-43a1-8704-eba99bb1e38d\") " pod="openshift-marketplace/redhat-operators-swxql" Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.633484 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-swxql"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.799926 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-98gbr"]
Dec 10 06:51:53 crc kubenswrapper[4765]: W1210 06:51:53.803969 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea399b94_8696_405b_956e_a807aca44b1c.slice/crio-552b90b4862afaffbfc9d479980db50df5016625dd13517a53e8b273134f40ec WatchSource:0}: Error finding container 552b90b4862afaffbfc9d479980db50df5016625dd13517a53e8b273134f40ec: Status 404 returned error can't find the container with id 552b90b4862afaffbfc9d479980db50df5016625dd13517a53e8b273134f40ec
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.894261 4765 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.894924 4765 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.894951 4765 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.895054 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895064 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.895075 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895097 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.895104 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895110 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.895122 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895133 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.895142 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895149 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.895158 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895165 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 10 06:51:53 crc kubenswrapper[4765]: E1210 06:51:53.895181 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895188 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895269 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895281 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895290 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895301 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895314 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.895546 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.896237 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.896489 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d" gracePeriod=15
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.897036 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78" gracePeriod=15
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.897031 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80" gracePeriod=15
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.897211 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f" gracePeriod=15
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.897242 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada" gracePeriod=15
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.900037 4765 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13"
Dec 10 06:51:53 crc kubenswrapper[4765]: I1210 06:51:53.930340 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.014685 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.014751 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.014941 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: 
\"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.015032 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.015058 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.015188 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.015213 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.015239 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.043923 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-swxql"] Dec 10 06:51:54 crc kubenswrapper[4765]: W1210 06:51:54.050735 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0877bed_7b01_43a1_8704_eba99bb1e38d.slice/crio-e1f5f2380aab85ab9889d7b0b34053b2c894cf5009bc9213b61ede53f425cb44 WatchSource:0}: Error finding container e1f5f2380aab85ab9889d7b0b34053b2c894cf5009bc9213b61ede53f425cb44: Status 404 returned error can't find the container with id e1f5f2380aab85ab9889d7b0b34053b2c894cf5009bc9213b61ede53f425cb44 Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116611 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116642 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: 
\"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116720 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116721 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116795 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116814 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116844 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116824 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116879 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116913 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116947 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: 
\"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116963 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116967 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116985 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.116987 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.117001 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.227506 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:51:54 crc kubenswrapper[4765]: W1210 06:51:54.247455 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-039aa7a2c1883ec20fc442b8e016cbb73a335f4ff6d468b44e1558b76ca6b7d2 WatchSource:0}: Error finding container 039aa7a2c1883ec20fc442b8e016cbb73a335f4ff6d468b44e1558b76ca6b7d2: Status 404 returned error can't find the container with id 039aa7a2c1883ec20fc442b8e016cbb73a335f4ff6d468b44e1558b76ca6b7d2 Dec 10 06:51:54 crc kubenswrapper[4765]: E1210 06:51:54.250797 4765 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.41:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187fc807ad43e0be openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 06:51:54.249756862 +0000 UTC m=+233.976422178,LastTimestamp:2025-12-10 06:51:54.249756862 +0000 UTC m=+233.976422178,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.317795 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"039aa7a2c1883ec20fc442b8e016cbb73a335f4ff6d468b44e1558b76ca6b7d2"} Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.318722 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-swxql" event={"ID":"c0877bed-7b01-43a1-8704-eba99bb1e38d","Type":"ContainerStarted","Data":"e1f5f2380aab85ab9889d7b0b34053b2c894cf5009bc9213b61ede53f425cb44"} Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.320241 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98gbr" event={"ID":"ea399b94-8696-405b-956e-a807aca44b1c","Type":"ContainerStarted","Data":"552b90b4862afaffbfc9d479980db50df5016625dd13517a53e8b273134f40ec"} Dec 10 06:51:54 crc kubenswrapper[4765]: E1210 06:51:54.876656 4765 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:54 crc kubenswrapper[4765]: E1210 06:51:54.877575 4765 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:54 crc kubenswrapper[4765]: E1210 06:51:54.877920 4765 controller.go:195] "Failed to update lease" err="Put 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:54 crc kubenswrapper[4765]: E1210 06:51:54.878332 4765 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:54 crc kubenswrapper[4765]: E1210 06:51:54.878591 4765 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:54 crc kubenswrapper[4765]: I1210 06:51:54.878627 4765 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 10 06:51:54 crc kubenswrapper[4765]: E1210 06:51:54.878977 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="200ms" Dec 10 06:51:55 crc kubenswrapper[4765]: E1210 06:51:55.079709 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="400ms" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.326741 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6"} Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.327288 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.328459 4765 generic.go:334] "Generic (PLEG): container finished" podID="c0877bed-7b01-43a1-8704-eba99bb1e38d" containerID="f470c5d8f1dd474870030b573cb11919ca4e096d9c23f0e0969d81cc8af6b190" exitCode=0 Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.328534 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-swxql" event={"ID":"c0877bed-7b01-43a1-8704-eba99bb1e38d","Type":"ContainerDied","Data":"f470c5d8f1dd474870030b573cb11919ca4e096d9c23f0e0969d81cc8af6b190"} Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.329061 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.329320 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" 
pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.330893 4765 generic.go:334] "Generic (PLEG): container finished" podID="ea399b94-8696-405b-956e-a807aca44b1c" containerID="61a7389e106b44b75fba2c2c4ac6c28cad1bc883115727b7fa4ace57395db4d0" exitCode=0 Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.331065 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98gbr" event={"ID":"ea399b94-8696-405b-956e-a807aca44b1c","Type":"ContainerDied","Data":"61a7389e106b44b75fba2c2c4ac6c28cad1bc883115727b7fa4ace57395db4d0"} Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.331390 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.331861 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.332176 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.334802 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.336100 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.336696 4765 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78" exitCode=0 Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.336732 4765 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80" exitCode=0 Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.336742 4765 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f" exitCode=0 Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.336753 4765 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada" exitCode=2 Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.336783 
4765 scope.go:117] "RemoveContainer" containerID="3dff02da16af8e750f9adcd05b72fbc0359355abc76b8c81b433aa3e8f9e666a" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.338785 4765 generic.go:334] "Generic (PLEG): container finished" podID="eef505b2-623b-437c-8207-550da3a806ba" containerID="620c2d5d9055cc2556ee493712a2ac52b1b2bb899d8b00b1f5e3fddedd818fb7" exitCode=0 Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.338841 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"eef505b2-623b-437c-8207-550da3a806ba","Type":"ContainerDied","Data":"620c2d5d9055cc2556ee493712a2ac52b1b2bb899d8b00b1f5e3fddedd818fb7"} Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.339691 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.340029 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.340310 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:55 crc kubenswrapper[4765]: I1210 06:51:55.340501 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:55 crc kubenswrapper[4765]: E1210 06:51:55.481444 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="800ms" Dec 10 06:51:56 crc kubenswrapper[4765]: E1210 06:51:56.093876 4765 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.41:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187fc807ad43e0be openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 
06:51:54.249756862 +0000 UTC m=+233.976422178,LastTimestamp:2025-12-10 06:51:54.249756862 +0000 UTC m=+233.976422178,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 10 06:51:56 crc kubenswrapper[4765]: E1210 06:51:56.282373 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="1.6s" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.347388 4765 generic.go:334] "Generic (PLEG): container finished" podID="ea399b94-8696-405b-956e-a807aca44b1c" containerID="d89b1f3b3dbfcee624872609a81a024cc78da50670d7c0a1f1ae281496715f8f" exitCode=0 Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.347478 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98gbr" event={"ID":"ea399b94-8696-405b-956e-a807aca44b1c","Type":"ContainerDied","Data":"d89b1f3b3dbfcee624872609a81a024cc78da50670d7c0a1f1ae281496715f8f"} Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.348204 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.348516 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.348702 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.350061 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.352071 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.502986 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.503715 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.504242 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.504476 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.504699 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.504925 4765 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.505229 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.547730 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.548157 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.548207 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.548459 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.548477 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.548507 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.595262 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.597071 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.597517 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.597674 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.597819 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.598021 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.649592 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-var-lock\") pod \"eef505b2-623b-437c-8207-550da3a806ba\" (UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.649655 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-kubelet-dir\") pod \"eef505b2-623b-437c-8207-550da3a806ba\" 
(UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.649719 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-var-lock" (OuterVolumeSpecName: "var-lock") pod "eef505b2-623b-437c-8207-550da3a806ba" (UID: "eef505b2-623b-437c-8207-550da3a806ba"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.649766 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eef505b2-623b-437c-8207-550da3a806ba-kube-api-access\") pod \"eef505b2-623b-437c-8207-550da3a806ba\" (UID: \"eef505b2-623b-437c-8207-550da3a806ba\") " Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.649793 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "eef505b2-623b-437c-8207-550da3a806ba" (UID: "eef505b2-623b-437c-8207-550da3a806ba"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.650233 4765 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.650261 4765 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.650272 4765 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-var-lock\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.650283 4765 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eef505b2-623b-437c-8207-550da3a806ba-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.650295 4765 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.656613 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eef505b2-623b-437c-8207-550da3a806ba-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "eef505b2-623b-437c-8207-550da3a806ba" (UID: "eef505b2-623b-437c-8207-550da3a806ba"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:51:56 crc kubenswrapper[4765]: I1210 06:51:56.751560 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eef505b2-623b-437c-8207-550da3a806ba-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.363528 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"eef505b2-623b-437c-8207-550da3a806ba","Type":"ContainerDied","Data":"8db4ad127ffa99c981b4fb1daa77bf98f8179aecf60a946336eee50f1018d7df"} Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.364016 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8db4ad127ffa99c981b4fb1daa77bf98f8179aecf60a946336eee50f1018d7df" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.363559 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.375767 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.377846 4765 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d" exitCode=0 Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.377945 4765 scope.go:117] "RemoveContainer" containerID="4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.378062 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.378956 4765 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.379567 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.379971 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.380331 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.380546 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.383831 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.384977 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.385045 4765 generic.go:334] "Generic (PLEG): container finished" podID="c0877bed-7b01-43a1-8704-eba99bb1e38d" containerID="3c878e749e554f615f44824e4667b153793f78f4d496fdc2fab7c316b2acb36d" exitCode=0 Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.385174 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-swxql" event={"ID":"c0877bed-7b01-43a1-8704-eba99bb1e38d","Type":"ContainerDied","Data":"3c878e749e554f615f44824e4667b153793f78f4d496fdc2fab7c316b2acb36d"} Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.387390 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-98gbr" event={"ID":"ea399b94-8696-405b-956e-a807aca44b1c","Type":"ContainerStarted","Data":"e908a0081a33fb5c21f61a44c8cc5f30c5173ef5451ed0e2aff3672ee9ebe003"} Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.387382 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.387800 4765 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.388197 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.388584 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.388777 4765 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.388936 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.389115 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.389308 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.400248 4765 scope.go:117] "RemoveContainer" containerID="fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80" Dec 10 06:51:57 crc kubenswrapper[4765]: 
I1210 06:51:57.416914 4765 scope.go:117] "RemoveContainer" containerID="228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.436752 4765 scope.go:117] "RemoveContainer" containerID="191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.469386 4765 scope.go:117] "RemoveContainer" containerID="960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.490059 4765 scope.go:117] "RemoveContainer" containerID="0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.510004 4765 scope.go:117] "RemoveContainer" containerID="4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78"
Dec 10 06:51:57 crc kubenswrapper[4765]: E1210 06:51:57.510469 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\": container with ID starting with 4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78 not found: ID does not exist" containerID="4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.510504 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78"} err="failed to get container status \"4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\": rpc error: code = NotFound desc = could not find container \"4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78\": container with ID starting with 4fe17677575775d4f0f89db18b1b534f58c19b032e9ef38f67e5c0bda2f2da78 not found: ID does not exist"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.510530 4765 scope.go:117] "RemoveContainer" containerID="fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80"
Dec 10 06:51:57 crc kubenswrapper[4765]: E1210 06:51:57.510869 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\": container with ID starting with fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80 not found: ID does not exist" containerID="fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.510896 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80"} err="failed to get container status \"fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\": rpc error: code = NotFound desc = could not find container \"fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80\": container with ID starting with fc18e3f077f502f7cff796d7d642964553587d82536bde92ff43f6c4f6b61e80 not found: ID does not exist"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.510916 4765 scope.go:117] "RemoveContainer" containerID="228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f"
Dec 10 06:51:57 crc kubenswrapper[4765]: E1210 06:51:57.511140 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\": container with ID starting with 228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f not found: ID does not exist" containerID="228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.511171 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f"} err="failed to get container status \"228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\": rpc error: code = NotFound desc = could not find container \"228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f\": container with ID starting with 228cc96007452f443b2a040e998e0ebf568eeac4bdd5d034256446be851b033f not found: ID does not exist"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.511192 4765 scope.go:117] "RemoveContainer" containerID="191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada"
Dec 10 06:51:57 crc kubenswrapper[4765]: E1210 06:51:57.511592 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\": container with ID starting with 191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada not found: ID does not exist" containerID="191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.511617 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada"} err="failed to get container status \"191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\": rpc error: code = NotFound desc = could not find container \"191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada\": container with ID starting with 191dd4415adccb548ac3da096dc40f18eb9aa03c9a695717fab960232f9d9ada not found: ID does not exist"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.511637 4765 scope.go:117] "RemoveContainer" containerID="960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d"
Dec 10 06:51:57 crc kubenswrapper[4765]: E1210 06:51:57.511899 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\": container with ID starting with 960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d not found: ID does not exist" containerID="960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.511935 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d"} err="failed to get container status \"960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\": rpc error: code = NotFound desc = could not find container \"960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d\": container with ID starting with 960c9ea5071fee865e087c00d1598f1a238c041d29faa87d1da6832e36b0bb0d not found: ID does not exist"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.511952 4765 scope.go:117] "RemoveContainer" containerID="0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d"
Dec 10 06:51:57 crc kubenswrapper[4765]: E1210 06:51:57.512201 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\": container with ID starting with 0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d not found: ID does not exist" containerID="0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d"
Dec 10 06:51:57 crc kubenswrapper[4765]: I1210 06:51:57.512226 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d"} err="failed to get container status \"0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\": rpc error: code = NotFound desc = could not find container \"0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d\": container with ID starting with 0641b9859b3ac96688894b5a2b672b07b756d4a07e7301a66d4c44111b391e3d not found: ID does not exist"
Dec 10 06:51:57 crc kubenswrapper[4765]: E1210 06:51:57.883327 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="3.2s"
Dec 10 06:51:59 crc kubenswrapper[4765]: I1210 06:51:59.401573 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-swxql" event={"ID":"c0877bed-7b01-43a1-8704-eba99bb1e38d","Type":"ContainerStarted","Data":"a2bcb1bbdb2933b47509c6bf8622917b387c081f95b812f34fcc83c35590d345"}
Dec 10 06:51:59 crc kubenswrapper[4765]: I1210 06:51:59.402746 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:51:59 crc kubenswrapper[4765]: I1210 06:51:59.403134 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:51:59 crc kubenswrapper[4765]: I1210 06:51:59.403386 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:51:59 crc kubenswrapper[4765]: I1210 06:51:59.403598 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:00 crc kubenswrapper[4765]: I1210 06:52:00.592378 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:00 crc kubenswrapper[4765]: I1210 06:52:00.593131 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:00 crc kubenswrapper[4765]: I1210 06:52:00.593556 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:00 crc kubenswrapper[4765]: I1210 06:52:00.593842 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:01 crc kubenswrapper[4765]: E1210 06:52:01.084786 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="6.4s"
Dec 10 06:52:03 crc kubenswrapper[4765]: I1210 06:52:03.419065 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-98gbr"
Dec 10 06:52:03 crc kubenswrapper[4765]: I1210 06:52:03.419609 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-98gbr"
Dec 10 06:52:03 crc kubenswrapper[4765]: I1210 06:52:03.461523 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-98gbr"
Dec 10 06:52:03 crc kubenswrapper[4765]: I1210 06:52:03.462143 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:03 crc kubenswrapper[4765]: I1210 06:52:03.462377 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:03 crc kubenswrapper[4765]: I1210 06:52:03.462628 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:03 crc kubenswrapper[4765]: I1210 06:52:03.462847 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:03 crc kubenswrapper[4765]: I1210 06:52:03.634548 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-swxql"
Dec 10 06:52:03 crc kubenswrapper[4765]: I1210 06:52:03.634623 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-swxql"
Dec 10 06:52:04 crc kubenswrapper[4765]: I1210 06:52:04.458807 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-98gbr"
Dec 10 06:52:04 crc kubenswrapper[4765]: I1210 06:52:04.459395 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:04 crc kubenswrapper[4765]: I1210 06:52:04.460068 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:04 crc kubenswrapper[4765]: I1210 06:52:04.460660 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:04 crc kubenswrapper[4765]: I1210 06:52:04.460949 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:04 crc kubenswrapper[4765]: I1210 06:52:04.667749 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-swxql" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" containerName="registry-server" probeResult="failure" output=<
Dec 10 06:52:04 crc kubenswrapper[4765]: timeout: failed to connect service ":50051" within 1s
Dec 10 06:52:04 crc kubenswrapper[4765]: >
Dec 10 06:52:06 crc kubenswrapper[4765]: E1210 06:52:06.095645 4765 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.41:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187fc807ad43e0be openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 06:51:54.249756862 +0000 UTC m=+233.976422178,LastTimestamp:2025-12-10 06:51:54.249756862 +0000 UTC m=+233.976422178,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 10 06:52:07 crc kubenswrapper[4765]: E1210 06:52:07.486177 4765 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="7s"
Dec 10 06:52:07 crc kubenswrapper[4765]: I1210 06:52:07.588382 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:07 crc kubenswrapper[4765]: I1210 06:52:07.589349 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:07 crc kubenswrapper[4765]: I1210 06:52:07.589625 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:07 crc kubenswrapper[4765]: I1210 06:52:07.589913 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:07 crc kubenswrapper[4765]: I1210 06:52:07.590167 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:07 crc kubenswrapper[4765]: I1210 06:52:07.601696 4765 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:07 crc kubenswrapper[4765]: I1210 06:52:07.601725 4765 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:07 crc kubenswrapper[4765]: E1210 06:52:07.602105 4765 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:07 crc kubenswrapper[4765]: I1210 06:52:07.602548 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:07 crc kubenswrapper[4765]: W1210 06:52:07.619047 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-e09acf685252f54b12aacf5f9f9cc4220a752f20b9c0313d2babafeee1009a39 WatchSource:0}: Error finding container e09acf685252f54b12aacf5f9f9cc4220a752f20b9c0313d2babafeee1009a39: Status 404 returned error can't find the container with id e09acf685252f54b12aacf5f9f9cc4220a752f20b9c0313d2babafeee1009a39
Dec 10 06:52:08 crc kubenswrapper[4765]: I1210 06:52:08.446163 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e09acf685252f54b12aacf5f9f9cc4220a752f20b9c0313d2babafeee1009a39"}
Dec 10 06:52:10 crc kubenswrapper[4765]: I1210 06:52:10.222345 4765 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Liveness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 10 06:52:10 crc kubenswrapper[4765]: I1210 06:52:10.222442 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 10 06:52:10 crc kubenswrapper[4765]: I1210 06:52:10.593416 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:10 crc kubenswrapper[4765]: I1210 06:52:10.593723 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:10 crc kubenswrapper[4765]: I1210 06:52:10.594141 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:10 crc kubenswrapper[4765]: I1210 06:52:10.594321 4765 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:10 crc kubenswrapper[4765]: I1210 06:52:10.594470 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:11 crc kubenswrapper[4765]: I1210 06:52:11.411376 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" containerName="oauth-openshift" containerID="cri-o://ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a" gracePeriod=15
Dec 10 06:52:11 crc kubenswrapper[4765]: I1210 06:52:11.461382 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5404a5783280f4d9f20b0bda384b66fe482c6e25ec513ae3733cf06bebe1e894"}
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.154385 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.155393 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.155676 4765 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.155869 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.156029 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.156302 4765 status_manager.go:851] "Failed to get status for pod" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-ljp6b\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.156608 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333616 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-error\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333674 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-provider-selection\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333695 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-router-certs\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333731 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-cliconfig\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333750 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2f2fd\" (UniqueName: \"kubernetes.io/projected/2c029930-d67e-4812-a6e1-bb8d9b806655-kube-api-access-2f2fd\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333768 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-dir\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333810 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-trusted-ca-bundle\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333841 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-login\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333859 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-policies\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333875 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-idp-0-file-data\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333892 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-service-ca\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333915 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-session\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.333915 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.334718 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.334834 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.334857 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.335025 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-serving-cert\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.335256 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-ocp-branding-template\") pod \"2c029930-d67e-4812-a6e1-bb8d9b806655\" (UID: \"2c029930-d67e-4812-a6e1-bb8d9b806655\") "
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.335258 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.335470 4765 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-policies\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.335482 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.335492 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.335502 4765 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2c029930-d67e-4812-a6e1-bb8d9b806655-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.335510 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.341245 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.341322 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c029930-d67e-4812-a6e1-bb8d9b806655-kube-api-access-2f2fd" (OuterVolumeSpecName: "kube-api-access-2f2fd") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "kube-api-access-2f2fd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.341542 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.341804 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.342002 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.342271 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.342522 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.342881 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.343013 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "2c029930-d67e-4812-a6e1-bb8d9b806655" (UID: "2c029930-d67e-4812-a6e1-bb8d9b806655"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.437001 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.437046 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.437062 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2f2fd\" (UniqueName: \"kubernetes.io/projected/2c029930-d67e-4812-a6e1-bb8d9b806655-kube-api-access-2f2fd\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.437074 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.437104 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.437116 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.437127 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.437138 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.437148 4765 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2c029930-d67e-4812-a6e1-bb8d9b806655-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.467272 4765 generic.go:334] "Generic (PLEG): container finished" podID="2c029930-d67e-4812-a6e1-bb8d9b806655" containerID="ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a" exitCode=0
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.467323 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" event={"ID":"2c029930-d67e-4812-a6e1-bb8d9b806655","Type":"ContainerDied","Data":"ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a"}
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.467374 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" event={"ID":"2c029930-d67e-4812-a6e1-bb8d9b806655","Type":"ContainerDied","Data":"f0e803afd6f5f15d92a7ce45c14d8c8bc790cad385a29e728c3ccd5a7577274d"}
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.467378 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.467398 4765 scope.go:117] "RemoveContainer" containerID="ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.468202 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.469436 4765 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="5404a5783280f4d9f20b0bda384b66fe482c6e25ec513ae3733cf06bebe1e894" exitCode=0
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.469610 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"5404a5783280f4d9f20b0bda384b66fe482c6e25ec513ae3733cf06bebe1e894"}
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.469703 4765 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.469717 4765 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:12 crc kubenswrapper[4765]: E1210 06:52:12.469955 4765 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.469110 4765 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.470676 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.471013 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.471425 4765 status_manager.go:851] "Failed to get status for pod" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-ljp6b\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.471621 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.471960 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.472275 4765 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.472475 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.472715 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.472950 4765 status_manager.go:851] "Failed to get status for pod" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-ljp6b\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.473231 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.476297 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.476337 4765 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530" exitCode=1
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.476368 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530"}
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.476789 4765 scope.go:117] "RemoveContainer" containerID="1f85a80ff626fcb13d1d72d9ab173af503095114f6c5672c8f08fdba46442530"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.476953 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.477279 4765 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.477526 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.477796 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.478068 4765 status_manager.go:851] "Failed to get status for pod" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-ljp6b\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.478278 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.478459 4765 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.480550 4765 status_manager.go:851] "Failed to get status for pod" podUID="ea399b94-8696-405b-956e-a807aca44b1c" pod="openshift-marketplace/redhat-marketplace-98gbr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-98gbr\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.480838 4765 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.481060 4765 status_manager.go:851] "Failed to get status for pod" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" pod="openshift-authentication/oauth-openshift-558db77b4-ljp6b" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-ljp6b\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.481787 4765 status_manager.go:851] "Failed to get status for pod" podUID="eef505b2-623b-437c-8207-550da3a806ba" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.482059 4765 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.482380 4765 status_manager.go:851] "Failed to get status for pod" podUID="c0877bed-7b01-43a1-8704-eba99bb1e38d" pod="openshift-marketplace/redhat-operators-swxql" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-swxql\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.482656 4765 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.491182 4765 scope.go:117] "RemoveContainer" containerID="ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a"
Dec 10 06:52:12 crc kubenswrapper[4765]: E1210 06:52:12.491577 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a\": container with ID starting with ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a not found: ID does not exist" containerID="ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.491616 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a"} err="failed to get container status \"ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a\": rpc error: code = NotFound desc = could not find container \"ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a\": container with ID starting with ba10ccf49ca766bca86227c81c8f1633eb66e01e2774efaefa1886103e06592a not found: ID does not exist"
Dec 10 06:52:12 crc kubenswrapper[4765]: E1210 06:52:12.638422 4765 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.41:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" volumeName="registry-storage"
Dec 10 06:52:12 crc kubenswrapper[4765]: I1210 06:52:12.651129 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:52:13 crc kubenswrapper[4765]: I1210 06:52:13.495273 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b7433d18a4044d81a95b02cc714aaf2c7324c8da982af9733ef3a2b1ce5da76c"}
Dec 10 06:52:13 crc kubenswrapper[4765]: I1210 06:52:13.495603 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a997654d42369f57e39f976c3ac3190b90eee6534ec0ae0295fe2f6b534ff879"}
Dec 10 06:52:13 crc kubenswrapper[4765]: I1210 06:52:13.495620 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5fcd9e539a53fa76d2a74a54e9581ff4a9abc515cfa07fc93ee47ae055d03307"}
Dec 10 06:52:13 crc kubenswrapper[4765]: I1210 06:52:13.495630 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8fbaa26d81905c0234bd00b75b83d6ff126770e8a033734609eb7d1eeac1c21a"}
Dec 10 06:52:13 crc kubenswrapper[4765]: I1210 06:52:13.501340 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 10 06:52:13 crc kubenswrapper[4765]: I1210 06:52:13.501399 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bd2697fb3d327b57707b9dc309250bcf96177eb0bb41a0cfb85203ab2751b083"}
Dec 10 06:52:13 crc kubenswrapper[4765]: I1210 06:52:13.691639 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-swxql"
Dec 10 06:52:13 crc kubenswrapper[4765]: I1210 06:52:13.725642 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-swxql"
Dec 10 06:52:14 crc kubenswrapper[4765]: I1210 06:52:14.510328 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c85ef9bb8b34cc604105d574b126f8b92b820b51899713f1610f13b1617c5386"}
Dec 10 06:52:14 crc kubenswrapper[4765]: I1210 06:52:14.511224 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:14 crc kubenswrapper[4765]: I1210 06:52:14.511428 4765 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:14 crc kubenswrapper[4765]: I1210 06:52:14.511532 4765 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:15 crc kubenswrapper[4765]: I1210 06:52:15.247777 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:52:17 crc kubenswrapper[4765]: I1210 06:52:17.603351 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:17 crc kubenswrapper[4765]: I1210 06:52:17.603415 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:17 crc kubenswrapper[4765]: I1210 06:52:17.608230 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:19 crc kubenswrapper[4765]: I1210 06:52:19.524239 4765 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:20 crc kubenswrapper[4765]: I1210 06:52:20.536295 4765 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:20 crc kubenswrapper[4765]: I1210 06:52:20.536599 4765 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:20 crc kubenswrapper[4765]: I1210 06:52:20.540852 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:20 crc kubenswrapper[4765]: I1210 06:52:20.542431 4765 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="53fec946-0486-4437-893e-8a4d454b9b5b"
Dec 10 06:52:21 crc kubenswrapper[4765]: I1210 06:52:21.542235 4765 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:21 crc kubenswrapper[4765]: I1210 06:52:21.542267 4765 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:22 crc kubenswrapper[4765]: I1210 06:52:22.651569 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:52:22 crc kubenswrapper[4765]: I1210 06:52:22.655585 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:52:23 crc kubenswrapper[4765]: I1210 06:52:23.557103 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 06:52:27 crc kubenswrapper[4765]: I1210 06:52:27.608163 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 06:52:27 crc kubenswrapper[4765]: I1210 06:52:27.608912 4765 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:27 crc kubenswrapper[4765]: I1210 06:52:27.608932 4765 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="901d0a63-c83f-4175-9e6f-70695c2ee2ff"
Dec 10 06:52:29 crc kubenswrapper[4765]: I1210 06:52:29.593636 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 10 06:52:29 crc kubenswrapper[4765]: I1210 06:52:29.686768 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 10 06:52:29 crc kubenswrapper[4765]: I1210 06:52:29.829118 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 10 06:52:29 crc kubenswrapper[4765]: I1210 06:52:29.864509 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 10 06:52:30 crc kubenswrapper[4765]: I1210 06:52:30.495893 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 10 06:52:30 crc kubenswrapper[4765]: I1210 06:52:30.529967 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 10 06:52:30 crc kubenswrapper[4765]: I1210 06:52:30.611245 4765 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="53fec946-0486-4437-893e-8a4d454b9b5b"
Dec 10 06:52:31 crc kubenswrapper[4765]: I1210 06:52:31.130308 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 10 06:52:31 crc kubenswrapper[4765]: I1210 06:52:31.180981 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 10 06:52:31 crc kubenswrapper[4765]: I1210 06:52:31.200392 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 10 06:52:31 crc kubenswrapper[4765]: I1210 06:52:31.391542 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 10 06:52:31 crc kubenswrapper[4765]: I1210 06:52:31.590938 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 10 06:52:31 crc kubenswrapper[4765]: I1210 06:52:31.632683 4765
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 10 06:52:31 crc kubenswrapper[4765]: I1210 06:52:31.880487 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 10 06:52:31 crc kubenswrapper[4765]: I1210 06:52:31.969242 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 10 06:52:32 crc kubenswrapper[4765]: I1210 06:52:32.047150 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 10 06:52:32 crc kubenswrapper[4765]: I1210 06:52:32.263443 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 10 06:52:32 crc kubenswrapper[4765]: I1210 06:52:32.353047 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 10 06:52:32 crc kubenswrapper[4765]: I1210 06:52:32.397757 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 10 06:52:32 crc kubenswrapper[4765]: I1210 06:52:32.850448 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.026385 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.212711 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.223945 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.255838 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.382434 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.424822 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.428685 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.508081 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.512995 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.670911 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.827853 4765 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.863006 4765 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.873404 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 10 06:52:33 crc kubenswrapper[4765]: I1210 06:52:33.978835 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.094468 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.221297 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.244562 4765 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.246961 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=41.246944177 podStartE2EDuration="41.246944177s" podCreationTimestamp="2025-12-10 06:51:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:52:19.188540946 +0000 UTC m=+258.915206262" watchObservedRunningTime="2025-12-10 06:52:34.246944177 +0000 UTC m=+273.973609493" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.247610 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-swxql" podStartSLOduration=38.620529599 podStartE2EDuration="41.247603386s" podCreationTimestamp="2025-12-10 06:51:53 +0000 UTC" firstStartedPulling="2025-12-10 06:51:55.330098142 +0000 UTC m=+235.056763448" lastFinishedPulling="2025-12-10 06:51:57.957171919 +0000 UTC m=+237.683837235" observedRunningTime="2025-12-10 06:52:19.26597826 +0000 UTC m=+258.992643576" watchObservedRunningTime="2025-12-10 06:52:34.247603386 +0000 UTC m=+273.974268702" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.248832 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-98gbr" podStartSLOduration=39.665788463 podStartE2EDuration="41.248825251s" podCreationTimestamp="2025-12-10 06:51:53 +0000 UTC" firstStartedPulling="2025-12-10 06:51:55.332304414 +0000 UTC m=+235.058969730" lastFinishedPulling="2025-12-10 06:51:56.915341202 +0000 UTC m=+236.642006518" observedRunningTime="2025-12-10 06:52:19.178454772 +0000 UTC m=+258.905120088" watchObservedRunningTime="2025-12-10 06:52:34.248825251 +0000 UTC m=+273.975490577" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.254621 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ljp6b","openshift-kube-apiserver/kube-apiserver-crc"] Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.254681 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.268246 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=15.268230193 podStartE2EDuration="15.268230193s" podCreationTimestamp="2025-12-10 06:52:19 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:52:34.268078819 +0000 UTC m=+273.994744135" watchObservedRunningTime="2025-12-10 06:52:34.268230193 +0000 UTC m=+273.994895509" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.290310 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.294246 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.331775 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.351285 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.406063 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.457469 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.476820 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.594495 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" path="/var/lib/kubelet/pods/2c029930-d67e-4812-a6e1-bb8d9b806655/volumes" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.626210 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.647575 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.848969 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.852069 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.952812 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.954743 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 10 06:52:34 crc kubenswrapper[4765]: I1210 06:52:34.971735 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.095582 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.238833 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.287216 4765 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.335709 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.340398 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.384110 4765 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.429166 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.439690 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.500389 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.728906 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.742363 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 10 06:52:35 crc kubenswrapper[4765]: I1210 06:52:35.927261 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 10 06:52:36 crc kubenswrapper[4765]: I1210 06:52:36.033749 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 10 06:52:36 crc kubenswrapper[4765]: I1210 06:52:36.198987 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 10 06:52:36 crc kubenswrapper[4765]: I1210 06:52:36.344355 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 10 06:52:36 crc kubenswrapper[4765]: I1210 06:52:36.362377 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 10 06:52:36 crc kubenswrapper[4765]: I1210 06:52:36.589862 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 10 06:52:36 crc kubenswrapper[4765]: I1210 06:52:36.676904 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 10 06:52:36 crc kubenswrapper[4765]: I1210 06:52:36.881890 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 10 06:52:36 crc kubenswrapper[4765]: I1210 06:52:36.921431 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 10 06:52:36 crc kubenswrapper[4765]: I1210 06:52:36.950424 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 10 06:52:36 crc kubenswrapper[4765]: I1210 06:52:36.971398 4765 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.103188 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.112115 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.218580 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.228045 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.311124 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.364575 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.381609 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-56c748df47-mzc6k"] Dec 10 06:52:37 crc kubenswrapper[4765]: E1210 06:52:37.381998 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" containerName="oauth-openshift" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.382076 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" containerName="oauth-openshift" Dec 10 06:52:37 crc kubenswrapper[4765]: E1210 06:52:37.382194 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eef505b2-623b-437c-8207-550da3a806ba" containerName="installer" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.382273 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="eef505b2-623b-437c-8207-550da3a806ba" containerName="installer" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.382485 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c029930-d67e-4812-a6e1-bb8d9b806655" containerName="oauth-openshift" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.382588 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="eef505b2-623b-437c-8207-550da3a806ba" containerName="installer" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.382998 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.388675 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.388694 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.388796 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.388811 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.389041 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.389252 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.389507 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.389774 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.389826 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.389946 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.390207 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.390588 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.399023 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.411645 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.412462 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-56c748df47-mzc6k"] Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.426917 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.441925 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.450116 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.533626 4765 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.533675 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.533695 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.533764 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-template-login\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.533838 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktjn9\" (UniqueName: \"kubernetes.io/projected/6f6430a0-f7ef-44c5-8036-8458d05bfea0-kube-api-access-ktjn9\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.533869 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-audit-policies\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.533891 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-router-certs\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.533915 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6f6430a0-f7ef-44c5-8036-8458d05bfea0-audit-dir\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 
crc kubenswrapper[4765]: I1210 06:52:37.533970 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.533996 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.534018 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-service-ca\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.534041 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-template-error\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.534064 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.534108 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-session\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.634922 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktjn9\" (UniqueName: \"kubernetes.io/projected/6f6430a0-f7ef-44c5-8036-8458d05bfea0-kube-api-access-ktjn9\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.634983 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-audit-policies\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: 
\"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635002 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-router-certs\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635020 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6f6430a0-f7ef-44c5-8036-8458d05bfea0-audit-dir\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635052 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635073 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-service-ca\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635106 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635124 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-template-error\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635141 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635159 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-session\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: 
\"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635182 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635200 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635218 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.635240 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-template-login\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.636176 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6f6430a0-f7ef-44c5-8036-8458d05bfea0-audit-dir\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.636837 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-audit-policies\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.637362 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.637495 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-service-ca\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 
06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.638643 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.640197 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-router-certs\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.640404 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.640607 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-session\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.640742 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-template-error\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.641425 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.641557 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-template-login\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.641616 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc 
kubenswrapper[4765]: I1210 06:52:37.642145 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6f6430a0-f7ef-44c5-8036-8458d05bfea0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.654067 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktjn9\" (UniqueName: \"kubernetes.io/projected/6f6430a0-f7ef-44c5-8036-8458d05bfea0-kube-api-access-ktjn9\") pod \"oauth-openshift-56c748df47-mzc6k\" (UID: \"6f6430a0-f7ef-44c5-8036-8458d05bfea0\") " pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.711616 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.714481 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.832699 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.889121 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.998586 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 10 06:52:37 crc kubenswrapper[4765]: I1210 06:52:37.998802 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.024043 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.031745 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.038254 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.074402 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.099823 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.144985 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.205271 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.214885 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.222704 4765 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.241134 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.523594 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.556563 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.579346 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.783265 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.827832 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.863504 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.958274 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 10 06:52:38 crc kubenswrapper[4765]: I1210 06:52:38.996962 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.054616 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-56c748df47-mzc6k"] Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.140731 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.215558 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.215964 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.245890 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.293544 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.311602 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.312468 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.355958 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.445287 4765 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-config-operator"/"kube-root-ca.crt" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.468996 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.554437 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.575435 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.620935 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.625173 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" event={"ID":"6f6430a0-f7ef-44c5-8036-8458d05bfea0","Type":"ContainerStarted","Data":"b8b3ff2e5d9efbc8cc2f729df6ab89fd8e41b4ec3133330407b3df450ea200bf"} Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.625218 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" event={"ID":"6f6430a0-f7ef-44c5-8036-8458d05bfea0","Type":"ContainerStarted","Data":"3e63e3c582a95be9401cc86f70ed5a36cd46382e19782e2a7d423965354d1894"} Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.625510 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.645702 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" podStartSLOduration=53.645686439 podStartE2EDuration="53.645686439s" podCreationTimestamp="2025-12-10 06:51:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:52:39.643025503 +0000 UTC m=+279.369690839" watchObservedRunningTime="2025-12-10 06:52:39.645686439 +0000 UTC m=+279.372351755" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.661131 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.757007 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.758817 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.772713 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.779808 4765 patch_prober.go:28] interesting pod/oauth-openshift-56c748df47-mzc6k container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.60:6443/healthz\": read tcp 10.217.0.2:50900->10.217.0.60:6443: read: connection reset by peer" start-of-body= Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.779863 4765 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" podUID="6f6430a0-f7ef-44c5-8036-8458d05bfea0" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.60:6443/healthz\": read tcp 10.217.0.2:50900->10.217.0.60:6443: read: connection reset by peer" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.945514 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 10 06:52:39 crc kubenswrapper[4765]: I1210 06:52:39.950246 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.029234 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.233016 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.247607 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.400790 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.506127 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.549158 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.568481 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.630591 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.631445 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-56c748df47-mzc6k_6f6430a0-f7ef-44c5-8036-8458d05bfea0/oauth-openshift/0.log" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.631488 4765 generic.go:334] "Generic (PLEG): container finished" podID="6f6430a0-f7ef-44c5-8036-8458d05bfea0" containerID="b8b3ff2e5d9efbc8cc2f729df6ab89fd8e41b4ec3133330407b3df450ea200bf" exitCode=255 Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.631525 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" event={"ID":"6f6430a0-f7ef-44c5-8036-8458d05bfea0","Type":"ContainerDied","Data":"b8b3ff2e5d9efbc8cc2f729df6ab89fd8e41b4ec3133330407b3df450ea200bf"} Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.632008 4765 scope.go:117] "RemoveContainer" containerID="b8b3ff2e5d9efbc8cc2f729df6ab89fd8e41b4ec3133330407b3df450ea200bf" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.651351 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.779553 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 10 
06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.870183 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.882664 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 10 06:52:40 crc kubenswrapper[4765]: I1210 06:52:40.938196 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.053385 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.182938 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.231219 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.272901 4765 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.275707 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.373869 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.373983 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.392741 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.471351 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.492653 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.532054 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.565805 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.571580 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.619347 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.637292 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-56c748df47-mzc6k_6f6430a0-f7ef-44c5-8036-8458d05bfea0/oauth-openshift/0.log" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.637350 4765 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" event={"ID":"6f6430a0-f7ef-44c5-8036-8458d05bfea0","Type":"ContainerStarted","Data":"ae4815a539a4655e24598052a633f0f618ed5f4f73a773e6b506b14bd5ab217d"} Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.637619 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.790975 4765 patch_prober.go:28] interesting pod/oauth-openshift-56c748df47-mzc6k container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.60:6443/healthz\": read tcp 10.217.0.2:50902->10.217.0.60:6443: read: connection reset by peer" start-of-body= Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.791022 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" podUID="6f6430a0-f7ef-44c5-8036-8458d05bfea0" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.60:6443/healthz\": read tcp 10.217.0.2:50902->10.217.0.60:6443: read: connection reset by peer" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.872588 4765 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.872839 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6" gracePeriod=5 Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.944018 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.976025 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 10 06:52:41 crc kubenswrapper[4765]: I1210 06:52:41.997764 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.053349 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.059821 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.266269 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.312351 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.329122 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.369512 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 10 06:52:42 crc 
kubenswrapper[4765]: I1210 06:52:42.378441 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.456024 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.555052 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.645741 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-56c748df47-mzc6k_6f6430a0-f7ef-44c5-8036-8458d05bfea0/oauth-openshift/1.log" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.646219 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-56c748df47-mzc6k_6f6430a0-f7ef-44c5-8036-8458d05bfea0/oauth-openshift/0.log" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.646262 4765 generic.go:334] "Generic (PLEG): container finished" podID="6f6430a0-f7ef-44c5-8036-8458d05bfea0" containerID="ae4815a539a4655e24598052a633f0f618ed5f4f73a773e6b506b14bd5ab217d" exitCode=255 Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.646308 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" event={"ID":"6f6430a0-f7ef-44c5-8036-8458d05bfea0","Type":"ContainerDied","Data":"ae4815a539a4655e24598052a633f0f618ed5f4f73a773e6b506b14bd5ab217d"} Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.646348 4765 scope.go:117] "RemoveContainer" containerID="b8b3ff2e5d9efbc8cc2f729df6ab89fd8e41b4ec3133330407b3df450ea200bf" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.647067 4765 scope.go:117] "RemoveContainer" containerID="ae4815a539a4655e24598052a633f0f618ed5f4f73a773e6b506b14bd5ab217d" Dec 10 06:52:42 crc kubenswrapper[4765]: E1210 06:52:42.647413 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-56c748df47-mzc6k_openshift-authentication(6f6430a0-f7ef-44c5-8036-8458d05bfea0)\"" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" podUID="6f6430a0-f7ef-44c5-8036-8458d05bfea0" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.684546 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 10 06:52:42 crc kubenswrapper[4765]: I1210 06:52:42.913870 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.039904 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.072445 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.172953 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.246304 4765 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.654628 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-56c748df47-mzc6k_6f6430a0-f7ef-44c5-8036-8458d05bfea0/oauth-openshift/1.log" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.655053 4765 scope.go:117] "RemoveContainer" containerID="ae4815a539a4655e24598052a633f0f618ed5f4f73a773e6b506b14bd5ab217d" Dec 10 06:52:43 crc kubenswrapper[4765]: E1210 06:52:43.655277 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-56c748df47-mzc6k_openshift-authentication(6f6430a0-f7ef-44c5-8036-8458d05bfea0)\"" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" podUID="6f6430a0-f7ef-44c5-8036-8458d05bfea0" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.718323 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.852914 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.858699 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.891781 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.939583 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 10 06:52:43 crc kubenswrapper[4765]: I1210 06:52:43.998450 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 10 06:52:44 crc kubenswrapper[4765]: I1210 06:52:44.243851 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 10 06:52:44 crc kubenswrapper[4765]: I1210 06:52:44.321411 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 10 06:52:44 crc kubenswrapper[4765]: I1210 06:52:44.571958 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 10 06:52:44 crc kubenswrapper[4765]: I1210 06:52:44.580605 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 10 06:52:44 crc kubenswrapper[4765]: I1210 06:52:44.985723 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.433626 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.433711 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560058 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560251 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560267 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560284 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560328 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560373 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560429 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560480 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560611 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560767 4765 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560791 4765 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560801 4765 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.560810 4765 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.567402 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.661393 4765 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.675240 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.675286 4765 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6" exitCode=137 Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.675328 4765 scope.go:117] "RemoveContainer" containerID="f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.675434 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.691215 4765 scope.go:117] "RemoveContainer" containerID="f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6" Dec 10 06:52:47 crc kubenswrapper[4765]: E1210 06:52:47.691783 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6\": container with ID starting with f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6 not found: ID does not exist" containerID="f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.691826 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6"} err="failed to get container status \"f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6\": rpc error: code = NotFound desc = could not find container \"f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6\": container with ID starting with f7f958e6bc6dbfce66c0f5c7c0a1ae59209c8e96e51228047edf42b55dde82d6 not found: ID does not exist" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.715290 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:52:47 crc kubenswrapper[4765]: I1210 06:52:47.716019 4765 scope.go:117] "RemoveContainer" containerID="ae4815a539a4655e24598052a633f0f618ed5f4f73a773e6b506b14bd5ab217d" Dec 10 06:52:47 crc kubenswrapper[4765]: E1210 06:52:47.716307 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-56c748df47-mzc6k_openshift-authentication(6f6430a0-f7ef-44c5-8036-8458d05bfea0)\"" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" podUID="6f6430a0-f7ef-44c5-8036-8458d05bfea0" Dec 10 06:52:48 crc kubenswrapper[4765]: I1210 06:52:48.595477 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 10 06:52:48 crc kubenswrapper[4765]: I1210 06:52:48.595759 4765 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 10 06:52:48 crc kubenswrapper[4765]: I1210 06:52:48.603396 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 10 06:52:48 crc kubenswrapper[4765]: I1210 06:52:48.603694 4765 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="bbef676d-e3b2-432e-bf95-f31fea4d814b" Dec 10 06:52:48 crc kubenswrapper[4765]: I1210 06:52:48.606890 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 10 06:52:48 crc kubenswrapper[4765]: I1210 06:52:48.606932 4765 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="bbef676d-e3b2-432e-bf95-f31fea4d814b" Dec 10 06:52:51 crc kubenswrapper[4765]: I1210 
06:52:51.641479 4765 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 10 06:52:52 crc kubenswrapper[4765]: I1210 06:52:52.463748 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 10 06:52:56 crc kubenswrapper[4765]: I1210 06:52:56.295986 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 10 06:52:57 crc kubenswrapper[4765]: I1210 06:52:57.231842 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 10 06:52:58 crc kubenswrapper[4765]: I1210 06:52:58.160027 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 10 06:52:58 crc kubenswrapper[4765]: I1210 06:52:58.363972 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 10 06:52:58 crc kubenswrapper[4765]: I1210 06:52:58.547409 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 10 06:52:59 crc kubenswrapper[4765]: I1210 06:52:59.035258 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 10 06:52:59 crc kubenswrapper[4765]: I1210 06:52:59.527065 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 10 06:52:59 crc kubenswrapper[4765]: I1210 06:52:59.544330 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 10 06:53:00 crc kubenswrapper[4765]: I1210 06:53:00.011629 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 10 06:53:00 crc kubenswrapper[4765]: I1210 06:53:00.088112 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 10 06:53:00 crc kubenswrapper[4765]: I1210 06:53:00.425313 4765 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 10 06:53:00 crc kubenswrapper[4765]: I1210 06:53:00.853689 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 10 06:53:00 crc kubenswrapper[4765]: I1210 06:53:00.938052 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 10 06:53:02 crc kubenswrapper[4765]: I1210 06:53:02.298073 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 10 06:53:02 crc kubenswrapper[4765]: I1210 06:53:02.589688 4765 scope.go:117] "RemoveContainer" containerID="ae4815a539a4655e24598052a633f0f618ed5f4f73a773e6b506b14bd5ab217d" Dec 10 06:53:02 crc kubenswrapper[4765]: I1210 06:53:02.643913 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 10 06:53:03 crc kubenswrapper[4765]: I1210 06:53:03.484681 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 10 06:53:03 crc kubenswrapper[4765]: I1210 06:53:03.536894 4765 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 10 06:53:03 crc kubenswrapper[4765]: I1210 06:53:03.752226 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-56c748df47-mzc6k_6f6430a0-f7ef-44c5-8036-8458d05bfea0/oauth-openshift/1.log" Dec 10 06:53:03 crc kubenswrapper[4765]: I1210 06:53:03.752285 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" event={"ID":"6f6430a0-f7ef-44c5-8036-8458d05bfea0","Type":"ContainerStarted","Data":"6566ac7439e20221f2e3665e786c17f312cfcc9127f2d31b6033b892e6991dc3"} Dec 10 06:53:03 crc kubenswrapper[4765]: I1210 06:53:03.752573 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:53:03 crc kubenswrapper[4765]: I1210 06:53:03.755875 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 10 06:53:03 crc kubenswrapper[4765]: I1210 06:53:03.757895 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-56c748df47-mzc6k" Dec 10 06:53:04 crc kubenswrapper[4765]: I1210 06:53:04.123105 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 10 06:53:04 crc kubenswrapper[4765]: I1210 06:53:04.360387 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 10 06:53:04 crc kubenswrapper[4765]: I1210 06:53:04.827495 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 10 06:53:05 crc kubenswrapper[4765]: I1210 06:53:05.648963 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 10 06:53:06 crc kubenswrapper[4765]: I1210 06:53:06.811339 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 10 06:53:07 crc kubenswrapper[4765]: I1210 06:53:07.220729 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 10 06:53:07 crc kubenswrapper[4765]: I1210 06:53:07.787578 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 10 06:53:07 crc kubenswrapper[4765]: I1210 06:53:07.926643 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 10 06:53:08 crc kubenswrapper[4765]: I1210 06:53:08.196029 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 10 06:53:08 crc kubenswrapper[4765]: I1210 06:53:08.352308 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 10 06:53:08 crc kubenswrapper[4765]: I1210 06:53:08.605262 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 10 06:53:08 crc kubenswrapper[4765]: I1210 06:53:08.911078 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 10 06:53:09 crc kubenswrapper[4765]: I1210 06:53:09.275245 4765 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 10 06:53:09 crc kubenswrapper[4765]: I1210 06:53:09.300709 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 10 06:53:09 crc kubenswrapper[4765]: I1210 06:53:09.416678 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 10 06:53:09 crc kubenswrapper[4765]: I1210 06:53:09.744944 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 10 06:53:10 crc kubenswrapper[4765]: I1210 06:53:10.096796 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 10 06:53:10 crc kubenswrapper[4765]: I1210 06:53:10.249849 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 10 06:53:10 crc kubenswrapper[4765]: I1210 06:53:10.638465 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 10 06:53:10 crc kubenswrapper[4765]: I1210 06:53:10.703746 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 10 06:53:10 crc kubenswrapper[4765]: I1210 06:53:10.967432 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 10 06:53:11 crc kubenswrapper[4765]: I1210 06:53:11.088163 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 10 06:53:12 crc kubenswrapper[4765]: I1210 06:53:12.192706 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 10 06:53:12 crc kubenswrapper[4765]: I1210 06:53:12.301475 4765 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 10 06:53:13 crc kubenswrapper[4765]: I1210 06:53:13.716370 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 10 06:53:13 crc kubenswrapper[4765]: I1210 06:53:13.898705 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 10 06:53:14 crc kubenswrapper[4765]: I1210 06:53:14.635533 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 10 06:53:14 crc kubenswrapper[4765]: I1210 06:53:14.895860 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 10 06:53:15 crc kubenswrapper[4765]: I1210 06:53:15.118866 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 10 06:53:15 crc kubenswrapper[4765]: I1210 06:53:15.364333 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 10 06:53:16 crc kubenswrapper[4765]: I1210 06:53:16.660427 4765 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 10 06:53:17 crc kubenswrapper[4765]: I1210 06:53:17.373203 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 10 06:53:18 crc kubenswrapper[4765]: I1210 06:53:18.853303 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 10 06:53:20 crc kubenswrapper[4765]: I1210 06:53:20.109132 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 10 06:53:21 crc kubenswrapper[4765]: I1210 06:53:21.684685 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 10 06:53:21 crc kubenswrapper[4765]: I1210 06:53:21.909944 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 10 06:53:22 crc kubenswrapper[4765]: I1210 06:53:22.719409 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 10 06:53:23 crc kubenswrapper[4765]: I1210 06:53:23.363255 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.033147 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f6lqh"] Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.034861 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" podUID="f0f8b6b6-1a80-4835-b50e-26dc93c985a0" containerName="controller-manager" containerID="cri-o://6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90" gracePeriod=30 Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.130720 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d"] Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.130968 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" podUID="0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f" containerName="route-controller-manager" containerID="cri-o://b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934" gracePeriod=30 Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.343376 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.448212 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.463905 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-serving-cert\") pod \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.463951 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcnkb\" (UniqueName: \"kubernetes.io/projected/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-kube-api-access-kcnkb\") pod \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.463973 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-config\") pod \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.464025 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-proxy-ca-bundles\") pod \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.464146 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-client-ca\") pod \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\" (UID: \"f0f8b6b6-1a80-4835-b50e-26dc93c985a0\") " Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.466140 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-client-ca" (OuterVolumeSpecName: "client-ca") pod "f0f8b6b6-1a80-4835-b50e-26dc93c985a0" (UID: "f0f8b6b6-1a80-4835-b50e-26dc93c985a0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.466215 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-config" (OuterVolumeSpecName: "config") pod "f0f8b6b6-1a80-4835-b50e-26dc93c985a0" (UID: "f0f8b6b6-1a80-4835-b50e-26dc93c985a0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.467521 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f0f8b6b6-1a80-4835-b50e-26dc93c985a0" (UID: "f0f8b6b6-1a80-4835-b50e-26dc93c985a0"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.472442 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f0f8b6b6-1a80-4835-b50e-26dc93c985a0" (UID: "f0f8b6b6-1a80-4835-b50e-26dc93c985a0"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.472680 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-kube-api-access-kcnkb" (OuterVolumeSpecName: "kube-api-access-kcnkb") pod "f0f8b6b6-1a80-4835-b50e-26dc93c985a0" (UID: "f0f8b6b6-1a80-4835-b50e-26dc93c985a0"). InnerVolumeSpecName "kube-api-access-kcnkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.564972 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-serving-cert\") pod \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.565015 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-config\") pod \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.565113 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc2hr\" (UniqueName: \"kubernetes.io/projected/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-kube-api-access-gc2hr\") pod \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.565134 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-client-ca\") pod \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\" (UID: \"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f\") " Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.565308 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.565320 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcnkb\" (UniqueName: \"kubernetes.io/projected/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-kube-api-access-kcnkb\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.565329 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.565339 4765 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.565347 4765 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f0f8b6b6-1a80-4835-b50e-26dc93c985a0-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.566316 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-client-ca" (OuterVolumeSpecName: "client-ca") pod "0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f" (UID: 
"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.566726 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-config" (OuterVolumeSpecName: "config") pod "0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f" (UID: "0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.568367 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-kube-api-access-gc2hr" (OuterVolumeSpecName: "kube-api-access-gc2hr") pod "0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f" (UID: "0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f"). InnerVolumeSpecName "kube-api-access-gc2hr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.568553 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f" (UID: "0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.666193 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.666224 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.666234 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc2hr\" (UniqueName: \"kubernetes.io/projected/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-kube-api-access-gc2hr\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.666242 4765 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.930841 4765 generic.go:334] "Generic (PLEG): container finished" podID="f0f8b6b6-1a80-4835-b50e-26dc93c985a0" containerID="6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90" exitCode=0 Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.930921 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.933341 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.934453 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" event={"ID":"f0f8b6b6-1a80-4835-b50e-26dc93c985a0","Type":"ContainerDied","Data":"6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90"} Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.934511 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-f6lqh" event={"ID":"f0f8b6b6-1a80-4835-b50e-26dc93c985a0","Type":"ContainerDied","Data":"28e96d85fce80b0b262c45aa6f4bed3efe02f9bbc2e2632cd3023d70996ca6b6"} Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.934530 4765 scope.go:117] "RemoveContainer" containerID="6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.934529 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" event={"ID":"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f","Type":"ContainerDied","Data":"b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934"} Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.934460 4765 generic.go:334] "Generic (PLEG): container finished" podID="0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f" containerID="b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934" exitCode=0 Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.934680 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d" event={"ID":"0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f","Type":"ContainerDied","Data":"966630f6c55c1b58a9f3ad515f47c66096ec89624c4e054cfaa797c27981e070"} Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.947285 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f6lqh"] Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.954356 4765 scope.go:117] "RemoveContainer" containerID="6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90" Dec 10 06:53:38 crc kubenswrapper[4765]: E1210 06:53:38.954959 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90\": container with ID starting with 6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90 not found: ID does not exist" containerID="6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.955030 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90"} err="failed to get container status \"6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90\": rpc error: code = NotFound desc = could not find container \"6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90\": container with ID starting with 6b7b9d988a0aadd48fb9acae4b0b505e0f09cef1e82a4959707530005da82f90 not found: ID does not exist" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.955065 4765 scope.go:117] "RemoveContainer" containerID="b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934" Dec 10 06:53:38 crc 
kubenswrapper[4765]: I1210 06:53:38.957021 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f6lqh"] Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.968423 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d"] Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.972605 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-w787d"] Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.974354 4765 scope.go:117] "RemoveContainer" containerID="b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934" Dec 10 06:53:38 crc kubenswrapper[4765]: E1210 06:53:38.974725 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934\": container with ID starting with b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934 not found: ID does not exist" containerID="b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934" Dec 10 06:53:38 crc kubenswrapper[4765]: I1210 06:53:38.974757 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934"} err="failed to get container status \"b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934\": rpc error: code = NotFound desc = could not find container \"b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934\": container with ID starting with b484653e66c6043a6a4438f8bcff92d255400e9144dbc8d0d7894c4aa58a7934 not found: ID does not exist" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.581725 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx"] Dec 10 06:53:39 crc kubenswrapper[4765]: E1210 06:53:39.582037 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f" containerName="route-controller-manager" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.582053 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f" containerName="route-controller-manager" Dec 10 06:53:39 crc kubenswrapper[4765]: E1210 06:53:39.582067 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0f8b6b6-1a80-4835-b50e-26dc93c985a0" containerName="controller-manager" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.582075 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0f8b6b6-1a80-4835-b50e-26dc93c985a0" containerName="controller-manager" Dec 10 06:53:39 crc kubenswrapper[4765]: E1210 06:53:39.582115 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.582123 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.582293 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0f8b6b6-1a80-4835-b50e-26dc93c985a0" containerName="controller-manager" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.582310 4765 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f" containerName="route-controller-manager" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.582325 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.582815 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.586036 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-58f8979484-ndncp"] Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.586292 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.586634 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.586851 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.586940 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.587198 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.587795 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.592456 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.593246 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.593357 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.593383 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.593448 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.593511 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.593686 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.599605 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.608382 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58f8979484-ndncp"] Dec 10 06:53:39 crc 
kubenswrapper[4765]: I1210 06:53:39.611708 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx"] Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.679745 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4l27\" (UniqueName: \"kubernetes.io/projected/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-kube-api-access-k4l27\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.679826 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-serving-cert\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.679853 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-client-ca\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.679883 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-proxy-ca-bundles\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.679905 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-config\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.679945 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/785aaa2a-c1ec-4f04-a333-fa8528ce5688-serving-cert\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.679973 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-client-ca\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.680004 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmsjk\" (UniqueName: 
\"kubernetes.io/projected/785aaa2a-c1ec-4f04-a333-fa8528ce5688-kube-api-access-qmsjk\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.680025 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-config\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.781716 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-client-ca\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.781790 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmsjk\" (UniqueName: \"kubernetes.io/projected/785aaa2a-c1ec-4f04-a333-fa8528ce5688-kube-api-access-qmsjk\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.781829 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-config\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.781884 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4l27\" (UniqueName: \"kubernetes.io/projected/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-kube-api-access-k4l27\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.781910 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-serving-cert\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.781947 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-client-ca\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.781977 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-proxy-ca-bundles\") pod 
\"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.781997 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-config\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.782032 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/785aaa2a-c1ec-4f04-a333-fa8528ce5688-serving-cert\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.783081 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-client-ca\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.783310 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-proxy-ca-bundles\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.783345 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-config\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.783518 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-client-ca\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.783616 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-config\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.785957 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/785aaa2a-c1ec-4f04-a333-fa8528ce5688-serving-cert\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.786485 4765 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-serving-cert\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.798414 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4l27\" (UniqueName: \"kubernetes.io/projected/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-kube-api-access-k4l27\") pod \"controller-manager-58f8979484-ndncp\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.798418 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmsjk\" (UniqueName: \"kubernetes.io/projected/785aaa2a-c1ec-4f04-a333-fa8528ce5688-kube-api-access-qmsjk\") pod \"route-controller-manager-596dc78bdd-cbljx\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.903599 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:39 crc kubenswrapper[4765]: I1210 06:53:39.918255 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:40 crc kubenswrapper[4765]: I1210 06:53:40.105525 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58f8979484-ndncp"] Dec 10 06:53:40 crc kubenswrapper[4765]: I1210 06:53:40.306326 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx"] Dec 10 06:53:40 crc kubenswrapper[4765]: W1210 06:53:40.309787 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod785aaa2a_c1ec_4f04_a333_fa8528ce5688.slice/crio-568d5c0c2b224e69ace6289502c4a8e8b6f9b1d7d6b3a3d6f795b21bbaa7808d WatchSource:0}: Error finding container 568d5c0c2b224e69ace6289502c4a8e8b6f9b1d7d6b3a3d6f795b21bbaa7808d: Status 404 returned error can't find the container with id 568d5c0c2b224e69ace6289502c4a8e8b6f9b1d7d6b3a3d6f795b21bbaa7808d Dec 10 06:53:40 crc kubenswrapper[4765]: I1210 06:53:40.594913 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f" path="/var/lib/kubelet/pods/0b6d3ddd-3ba8-4d68-9e05-1b0bcffe0d1f/volumes" Dec 10 06:53:40 crc kubenswrapper[4765]: I1210 06:53:40.595470 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0f8b6b6-1a80-4835-b50e-26dc93c985a0" path="/var/lib/kubelet/pods/f0f8b6b6-1a80-4835-b50e-26dc93c985a0/volumes" Dec 10 06:53:41 crc kubenswrapper[4765]: I1210 06:53:41.148213 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" event={"ID":"785aaa2a-c1ec-4f04-a333-fa8528ce5688","Type":"ContainerStarted","Data":"9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c"} Dec 10 06:53:41 crc kubenswrapper[4765]: I1210 06:53:41.148287 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" event={"ID":"785aaa2a-c1ec-4f04-a333-fa8528ce5688","Type":"ContainerStarted","Data":"568d5c0c2b224e69ace6289502c4a8e8b6f9b1d7d6b3a3d6f795b21bbaa7808d"} Dec 10 06:53:41 crc kubenswrapper[4765]: I1210 06:53:41.150729 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:41 crc kubenswrapper[4765]: I1210 06:53:41.152963 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" event={"ID":"5366f55d-8998-47f1-bf4d-1ee21bf1a39f","Type":"ContainerStarted","Data":"36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859"} Dec 10 06:53:41 crc kubenswrapper[4765]: I1210 06:53:41.152993 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" event={"ID":"5366f55d-8998-47f1-bf4d-1ee21bf1a39f","Type":"ContainerStarted","Data":"9068e32c05116b886422ef55fdf5394767ddc9d62184ca824a15f6a369897cd9"} Dec 10 06:53:41 crc kubenswrapper[4765]: I1210 06:53:41.153612 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:41 crc kubenswrapper[4765]: I1210 06:53:41.163367 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:41 crc kubenswrapper[4765]: I1210 06:53:41.177696 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" podStartSLOduration=3.177674818 podStartE2EDuration="3.177674818s" podCreationTimestamp="2025-12-10 06:53:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:53:41.173037536 +0000 UTC m=+340.899703112" watchObservedRunningTime="2025-12-10 06:53:41.177674818 +0000 UTC m=+340.904340144" Dec 10 06:53:41 crc kubenswrapper[4765]: I1210 06:53:41.195839 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" podStartSLOduration=3.195824186 podStartE2EDuration="3.195824186s" podCreationTimestamp="2025-12-10 06:53:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:53:41.194881339 +0000 UTC m=+340.921546655" watchObservedRunningTime="2025-12-10 06:53:41.195824186 +0000 UTC m=+340.922489502" Dec 10 06:53:41 crc kubenswrapper[4765]: I1210 06:53:41.197458 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:42 crc kubenswrapper[4765]: I1210 06:53:42.603432 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-58f8979484-ndncp"] Dec 10 06:53:42 crc kubenswrapper[4765]: I1210 06:53:42.647896 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx"] Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.167211 4765 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" podUID="5366f55d-8998-47f1-bf4d-1ee21bf1a39f" containerName="controller-manager" containerID="cri-o://36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859" gracePeriod=30 Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.167586 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" podUID="785aaa2a-c1ec-4f04-a333-fa8528ce5688" containerName="route-controller-manager" containerID="cri-o://9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c" gracePeriod=30 Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.550033 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.585408 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb"] Dec 10 06:53:44 crc kubenswrapper[4765]: E1210 06:53:44.585622 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="785aaa2a-c1ec-4f04-a333-fa8528ce5688" containerName="route-controller-manager" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.585634 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="785aaa2a-c1ec-4f04-a333-fa8528ce5688" containerName="route-controller-manager" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.585750 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="785aaa2a-c1ec-4f04-a333-fa8528ce5688" containerName="route-controller-manager" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.586078 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.586452 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-client-ca\") pod \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.586569 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmsjk\" (UniqueName: \"kubernetes.io/projected/785aaa2a-c1ec-4f04-a333-fa8528ce5688-kube-api-access-qmsjk\") pod \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.586630 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/785aaa2a-c1ec-4f04-a333-fa8528ce5688-serving-cert\") pod \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.586680 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-config\") pod \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\" (UID: \"785aaa2a-c1ec-4f04-a333-fa8528ce5688\") " Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.587108 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-client-ca" (OuterVolumeSpecName: "client-ca") pod "785aaa2a-c1ec-4f04-a333-fa8528ce5688" (UID: "785aaa2a-c1ec-4f04-a333-fa8528ce5688"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.587598 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-config" (OuterVolumeSpecName: "config") pod "785aaa2a-c1ec-4f04-a333-fa8528ce5688" (UID: "785aaa2a-c1ec-4f04-a333-fa8528ce5688"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.592117 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb"] Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.593331 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/785aaa2a-c1ec-4f04-a333-fa8528ce5688-kube-api-access-qmsjk" (OuterVolumeSpecName: "kube-api-access-qmsjk") pod "785aaa2a-c1ec-4f04-a333-fa8528ce5688" (UID: "785aaa2a-c1ec-4f04-a333-fa8528ce5688"). InnerVolumeSpecName "kube-api-access-qmsjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.593427 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/785aaa2a-c1ec-4f04-a333-fa8528ce5688-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "785aaa2a-c1ec-4f04-a333-fa8528ce5688" (UID: "785aaa2a-c1ec-4f04-a333-fa8528ce5688"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.639274 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.688282 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-proxy-ca-bundles\") pod \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.688325 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4l27\" (UniqueName: \"kubernetes.io/projected/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-kube-api-access-k4l27\") pod \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.688392 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-config\") pod \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.688460 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-serving-cert\") pod \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.688483 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-client-ca\") pod \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\" (UID: \"5366f55d-8998-47f1-bf4d-1ee21bf1a39f\") " Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.688604 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-client-ca\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.688636 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg7cx\" (UniqueName: \"kubernetes.io/projected/dcca5aba-a84b-414e-a345-2304ebb500c0-kube-api-access-qg7cx\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.688776 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-config\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.688818 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcca5aba-a84b-414e-a345-2304ebb500c0-serving-cert\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.689159 4765 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.689189 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmsjk\" (UniqueName: \"kubernetes.io/projected/785aaa2a-c1ec-4f04-a333-fa8528ce5688-kube-api-access-qmsjk\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.689198 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/785aaa2a-c1ec-4f04-a333-fa8528ce5688-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.689207 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/785aaa2a-c1ec-4f04-a333-fa8528ce5688-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.689300 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-client-ca" (OuterVolumeSpecName: "client-ca") pod "5366f55d-8998-47f1-bf4d-1ee21bf1a39f" (UID: "5366f55d-8998-47f1-bf4d-1ee21bf1a39f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.689415 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-config" (OuterVolumeSpecName: "config") pod "5366f55d-8998-47f1-bf4d-1ee21bf1a39f" (UID: "5366f55d-8998-47f1-bf4d-1ee21bf1a39f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.689429 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5366f55d-8998-47f1-bf4d-1ee21bf1a39f" (UID: "5366f55d-8998-47f1-bf4d-1ee21bf1a39f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.691847 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-kube-api-access-k4l27" (OuterVolumeSpecName: "kube-api-access-k4l27") pod "5366f55d-8998-47f1-bf4d-1ee21bf1a39f" (UID: "5366f55d-8998-47f1-bf4d-1ee21bf1a39f"). InnerVolumeSpecName "kube-api-access-k4l27". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.695156 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5366f55d-8998-47f1-bf4d-1ee21bf1a39f" (UID: "5366f55d-8998-47f1-bf4d-1ee21bf1a39f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.790266 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-client-ca\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.790321 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg7cx\" (UniqueName: \"kubernetes.io/projected/dcca5aba-a84b-414e-a345-2304ebb500c0-kube-api-access-qg7cx\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.790350 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-config\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.790365 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcca5aba-a84b-414e-a345-2304ebb500c0-serving-cert\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.790401 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.790413 4765 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.790423 4765 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.790433 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4l27\" (UniqueName: \"kubernetes.io/projected/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-kube-api-access-k4l27\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.790442 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5366f55d-8998-47f1-bf4d-1ee21bf1a39f-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.791191 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-client-ca\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 
06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.791740 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-config\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.793851 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcca5aba-a84b-414e-a345-2304ebb500c0-serving-cert\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.805467 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg7cx\" (UniqueName: \"kubernetes.io/projected/dcca5aba-a84b-414e-a345-2304ebb500c0-kube-api-access-qg7cx\") pod \"route-controller-manager-5598468cdf-qcrzb\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:44 crc kubenswrapper[4765]: I1210 06:53:44.937646 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.102979 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb"] Dec 10 06:53:45 crc kubenswrapper[4765]: W1210 06:53:45.109128 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddcca5aba_a84b_414e_a345_2304ebb500c0.slice/crio-228608f467dc1dea30b8691d12ebb6cc5ea46552ef05c7052d98fe5df4d96f7b WatchSource:0}: Error finding container 228608f467dc1dea30b8691d12ebb6cc5ea46552ef05c7052d98fe5df4d96f7b: Status 404 returned error can't find the container with id 228608f467dc1dea30b8691d12ebb6cc5ea46552ef05c7052d98fe5df4d96f7b Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.173981 4765 generic.go:334] "Generic (PLEG): container finished" podID="5366f55d-8998-47f1-bf4d-1ee21bf1a39f" containerID="36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859" exitCode=0 Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.174030 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" event={"ID":"5366f55d-8998-47f1-bf4d-1ee21bf1a39f","Type":"ContainerDied","Data":"36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859"} Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.174406 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" event={"ID":"5366f55d-8998-47f1-bf4d-1ee21bf1a39f","Type":"ContainerDied","Data":"9068e32c05116b886422ef55fdf5394767ddc9d62184ca824a15f6a369897cd9"} Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.174431 4765 scope.go:117] "RemoveContainer" containerID="36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859" Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.174109 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-58f8979484-ndncp" Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.179429 4765 generic.go:334] "Generic (PLEG): container finished" podID="785aaa2a-c1ec-4f04-a333-fa8528ce5688" containerID="9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c" exitCode=0 Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.179603 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.179617 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" event={"ID":"785aaa2a-c1ec-4f04-a333-fa8528ce5688","Type":"ContainerDied","Data":"9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c"} Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.179668 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx" event={"ID":"785aaa2a-c1ec-4f04-a333-fa8528ce5688","Type":"ContainerDied","Data":"568d5c0c2b224e69ace6289502c4a8e8b6f9b1d7d6b3a3d6f795b21bbaa7808d"} Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.183031 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" event={"ID":"dcca5aba-a84b-414e-a345-2304ebb500c0","Type":"ContainerStarted","Data":"228608f467dc1dea30b8691d12ebb6cc5ea46552ef05c7052d98fe5df4d96f7b"} Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.199675 4765 scope.go:117] "RemoveContainer" containerID="36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859" Dec 10 06:53:45 crc kubenswrapper[4765]: E1210 06:53:45.200540 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859\": container with ID starting with 36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859 not found: ID does not exist" containerID="36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859" Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.200579 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859"} err="failed to get container status \"36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859\": rpc error: code = NotFound desc = could not find container \"36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859\": container with ID starting with 36097a5f8465f1e4f9f66fdf97f8bd245b49db60b882d7e8c9c4ee930d97a859 not found: ID does not exist" Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.200601 4765 scope.go:117] "RemoveContainer" containerID="9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c" Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.200675 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx"] Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.209920 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-596dc78bdd-cbljx"] Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.214731 4765 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-controller-manager/controller-manager-58f8979484-ndncp"] Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.215409 4765 scope.go:117] "RemoveContainer" containerID="9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c" Dec 10 06:53:45 crc kubenswrapper[4765]: E1210 06:53:45.215972 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c\": container with ID starting with 9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c not found: ID does not exist" containerID="9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c" Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.215999 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c"} err="failed to get container status \"9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c\": rpc error: code = NotFound desc = could not find container \"9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c\": container with ID starting with 9daae738147c71b8650e1a59e4f3af71c8650ffc05fb28d97b5ecc4866710c2c not found: ID does not exist" Dec 10 06:53:45 crc kubenswrapper[4765]: I1210 06:53:45.219045 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-58f8979484-ndncp"] Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.188321 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" event={"ID":"dcca5aba-a84b-414e-a345-2304ebb500c0","Type":"ContainerStarted","Data":"99d92218bde2c5be72d7b6256c01de3d84c001cb5d144183cd0dc3c20fa89417"} Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.188700 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.193604 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.207464 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" podStartSLOduration=3.2074463 podStartE2EDuration="3.2074463s" podCreationTimestamp="2025-12-10 06:53:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:53:46.202840889 +0000 UTC m=+345.929506225" watchObservedRunningTime="2025-12-10 06:53:46.2074463 +0000 UTC m=+345.934111616" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.585970 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-ctwd2"] Dec 10 06:53:46 crc kubenswrapper[4765]: E1210 06:53:46.586227 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5366f55d-8998-47f1-bf4d-1ee21bf1a39f" containerName="controller-manager" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.586241 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="5366f55d-8998-47f1-bf4d-1ee21bf1a39f" containerName="controller-manager" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 
06:53:46.586358 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="5366f55d-8998-47f1-bf4d-1ee21bf1a39f" containerName="controller-manager" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.586731 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.588467 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.589516 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.589867 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.590279 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.590331 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.595986 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5366f55d-8998-47f1-bf4d-1ee21bf1a39f" path="/var/lib/kubelet/pods/5366f55d-8998-47f1-bf4d-1ee21bf1a39f/volumes" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.596817 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="785aaa2a-c1ec-4f04-a333-fa8528ce5688" path="/var/lib/kubelet/pods/785aaa2a-c1ec-4f04-a333-fa8528ce5688/volumes" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.596975 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.597306 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.601624 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-ctwd2"] Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.612236 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl9gg\" (UniqueName: \"kubernetes.io/projected/be6aa029-45f0-48ef-8c9f-0bebe1720386-kube-api-access-nl9gg\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.612597 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-proxy-ca-bundles\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.612786 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be6aa029-45f0-48ef-8c9f-0bebe1720386-serving-cert\") pod 
\"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.612940 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-client-ca\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.613179 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-config\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.714629 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl9gg\" (UniqueName: \"kubernetes.io/projected/be6aa029-45f0-48ef-8c9f-0bebe1720386-kube-api-access-nl9gg\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.714967 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-proxy-ca-bundles\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.715025 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be6aa029-45f0-48ef-8c9f-0bebe1720386-serving-cert\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.715060 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-client-ca\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.715122 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-config\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.716247 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-client-ca\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc 
kubenswrapper[4765]: I1210 06:53:46.716420 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-config\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.716515 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-proxy-ca-bundles\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.720486 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be6aa029-45f0-48ef-8c9f-0bebe1720386-serving-cert\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.734794 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl9gg\" (UniqueName: \"kubernetes.io/projected/be6aa029-45f0-48ef-8c9f-0bebe1720386-kube-api-access-nl9gg\") pod \"controller-manager-5964cbcb45-ctwd2\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:46 crc kubenswrapper[4765]: I1210 06:53:46.899973 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:47 crc kubenswrapper[4765]: I1210 06:53:47.065259 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-ctwd2"] Dec 10 06:53:47 crc kubenswrapper[4765]: W1210 06:53:47.073221 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe6aa029_45f0_48ef_8c9f_0bebe1720386.slice/crio-74a259299352baba4ac1554b8d6bf2bd4213337e9a32efccfebce3caf5f42e76 WatchSource:0}: Error finding container 74a259299352baba4ac1554b8d6bf2bd4213337e9a32efccfebce3caf5f42e76: Status 404 returned error can't find the container with id 74a259299352baba4ac1554b8d6bf2bd4213337e9a32efccfebce3caf5f42e76 Dec 10 06:53:47 crc kubenswrapper[4765]: I1210 06:53:47.198021 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" event={"ID":"be6aa029-45f0-48ef-8c9f-0bebe1720386","Type":"ContainerStarted","Data":"5ca8cd889663014c19de20cf3b23f01051cff53feb1c055bf4944f4189063c6e"} Dec 10 06:53:47 crc kubenswrapper[4765]: I1210 06:53:47.198314 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" event={"ID":"be6aa029-45f0-48ef-8c9f-0bebe1720386","Type":"ContainerStarted","Data":"74a259299352baba4ac1554b8d6bf2bd4213337e9a32efccfebce3caf5f42e76"} Dec 10 06:53:47 crc kubenswrapper[4765]: I1210 06:53:47.223859 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" podStartSLOduration=4.223843389 podStartE2EDuration="4.223843389s" 
podCreationTimestamp="2025-12-10 06:53:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:53:47.217895599 +0000 UTC m=+346.944560915" watchObservedRunningTime="2025-12-10 06:53:47.223843389 +0000 UTC m=+346.950508705" Dec 10 06:53:48 crc kubenswrapper[4765]: I1210 06:53:48.202470 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:48 crc kubenswrapper[4765]: I1210 06:53:48.207355 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.790753 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x65d6"] Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.792173 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.794548 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.809349 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x65d6"] Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.871233 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn7th\" (UniqueName: \"kubernetes.io/projected/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-kube-api-access-gn7th\") pod \"community-operators-x65d6\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.871291 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-catalog-content\") pod \"community-operators-x65d6\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.871321 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-utilities\") pod \"community-operators-x65d6\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.972547 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-utilities\") pod \"community-operators-x65d6\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.972659 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn7th\" (UniqueName: \"kubernetes.io/projected/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-kube-api-access-gn7th\") pod \"community-operators-x65d6\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 
06:53:50.972716 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-catalog-content\") pod \"community-operators-x65d6\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.973055 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-utilities\") pod \"community-operators-x65d6\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.973108 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-catalog-content\") pod \"community-operators-x65d6\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.983050 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-grp4g"] Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.984516 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.986459 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.992400 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn7th\" (UniqueName: \"kubernetes.io/projected/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-kube-api-access-gn7th\") pod \"community-operators-x65d6\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:50 crc kubenswrapper[4765]: I1210 06:53:50.993178 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-grp4g"] Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.074372 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hglzf\" (UniqueName: \"kubernetes.io/projected/2b5f2726-04c7-41c4-ab5c-5eb64062c107-kube-api-access-hglzf\") pod \"certified-operators-grp4g\" (UID: \"2b5f2726-04c7-41c4-ab5c-5eb64062c107\") " pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.074464 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5f2726-04c7-41c4-ab5c-5eb64062c107-utilities\") pod \"certified-operators-grp4g\" (UID: \"2b5f2726-04c7-41c4-ab5c-5eb64062c107\") " pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.074504 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5f2726-04c7-41c4-ab5c-5eb64062c107-catalog-content\") pod \"certified-operators-grp4g\" (UID: \"2b5f2726-04c7-41c4-ab5c-5eb64062c107\") " pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 
06:53:51.113334 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.175639 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hglzf\" (UniqueName: \"kubernetes.io/projected/2b5f2726-04c7-41c4-ab5c-5eb64062c107-kube-api-access-hglzf\") pod \"certified-operators-grp4g\" (UID: \"2b5f2726-04c7-41c4-ab5c-5eb64062c107\") " pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.175727 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5f2726-04c7-41c4-ab5c-5eb64062c107-utilities\") pod \"certified-operators-grp4g\" (UID: \"2b5f2726-04c7-41c4-ab5c-5eb64062c107\") " pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.175758 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5f2726-04c7-41c4-ab5c-5eb64062c107-catalog-content\") pod \"certified-operators-grp4g\" (UID: \"2b5f2726-04c7-41c4-ab5c-5eb64062c107\") " pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.176659 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5f2726-04c7-41c4-ab5c-5eb64062c107-utilities\") pod \"certified-operators-grp4g\" (UID: \"2b5f2726-04c7-41c4-ab5c-5eb64062c107\") " pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.176670 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5f2726-04c7-41c4-ab5c-5eb64062c107-catalog-content\") pod \"certified-operators-grp4g\" (UID: \"2b5f2726-04c7-41c4-ab5c-5eb64062c107\") " pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.201305 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hglzf\" (UniqueName: \"kubernetes.io/projected/2b5f2726-04c7-41c4-ab5c-5eb64062c107-kube-api-access-hglzf\") pod \"certified-operators-grp4g\" (UID: \"2b5f2726-04c7-41c4-ab5c-5eb64062c107\") " pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.312321 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.503245 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x65d6"] Dec 10 06:53:51 crc kubenswrapper[4765]: W1210 06:53:51.507687 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod237c5fb0_13a8_4c3b_af3a_aca5dfe5b86c.slice/crio-92555d7f32ad221a69782b3deb67178c449e002a2f0177cde0f878cdc45bb93c WatchSource:0}: Error finding container 92555d7f32ad221a69782b3deb67178c449e002a2f0177cde0f878cdc45bb93c: Status 404 returned error can't find the container with id 92555d7f32ad221a69782b3deb67178c449e002a2f0177cde0f878cdc45bb93c Dec 10 06:53:51 crc kubenswrapper[4765]: I1210 06:53:51.730134 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-grp4g"] Dec 10 06:53:51 crc kubenswrapper[4765]: W1210 06:53:51.733313 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b5f2726_04c7_41c4_ab5c_5eb64062c107.slice/crio-4300c699aa5d0ece37ae0da23fce1a14f0ed0a31a9c2df73036ebc28405100cd WatchSource:0}: Error finding container 4300c699aa5d0ece37ae0da23fce1a14f0ed0a31a9c2df73036ebc28405100cd: Status 404 returned error can't find the container with id 4300c699aa5d0ece37ae0da23fce1a14f0ed0a31a9c2df73036ebc28405100cd Dec 10 06:53:52 crc kubenswrapper[4765]: I1210 06:53:52.226512 4765 generic.go:334] "Generic (PLEG): container finished" podID="2b5f2726-04c7-41c4-ab5c-5eb64062c107" containerID="3a331af0f6d821eedc6ba12eaa512f9150c72dcfe0ec82aa9b5445a735cb41ec" exitCode=0 Dec 10 06:53:52 crc kubenswrapper[4765]: I1210 06:53:52.226577 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grp4g" event={"ID":"2b5f2726-04c7-41c4-ab5c-5eb64062c107","Type":"ContainerDied","Data":"3a331af0f6d821eedc6ba12eaa512f9150c72dcfe0ec82aa9b5445a735cb41ec"} Dec 10 06:53:52 crc kubenswrapper[4765]: I1210 06:53:52.226603 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grp4g" event={"ID":"2b5f2726-04c7-41c4-ab5c-5eb64062c107","Type":"ContainerStarted","Data":"4300c699aa5d0ece37ae0da23fce1a14f0ed0a31a9c2df73036ebc28405100cd"} Dec 10 06:53:52 crc kubenswrapper[4765]: I1210 06:53:52.228247 4765 generic.go:334] "Generic (PLEG): container finished" podID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerID="1d553645481de4b548336dd6d184e0a9749f29f661102671aed4d0b8f5090d9a" exitCode=0 Dec 10 06:53:52 crc kubenswrapper[4765]: I1210 06:53:52.228285 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x65d6" event={"ID":"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c","Type":"ContainerDied","Data":"1d553645481de4b548336dd6d184e0a9749f29f661102671aed4d0b8f5090d9a"} Dec 10 06:53:52 crc kubenswrapper[4765]: I1210 06:53:52.228328 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x65d6" event={"ID":"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c","Type":"ContainerStarted","Data":"92555d7f32ad221a69782b3deb67178c449e002a2f0177cde0f878cdc45bb93c"} Dec 10 06:53:53 crc kubenswrapper[4765]: I1210 06:53:53.234723 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grp4g" 
event={"ID":"2b5f2726-04c7-41c4-ab5c-5eb64062c107","Type":"ContainerStarted","Data":"a0018b9a7c37d04e50ff9c508a56d030a8884f002011c0f56fdabde52e8561c5"} Dec 10 06:53:53 crc kubenswrapper[4765]: I1210 06:53:53.239451 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x65d6" event={"ID":"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c","Type":"ContainerStarted","Data":"665bfcdd64158189627806224a38b9a6366ea55ebc5ac6f6265522634ab3c811"} Dec 10 06:53:54 crc kubenswrapper[4765]: I1210 06:53:54.246688 4765 generic.go:334] "Generic (PLEG): container finished" podID="2b5f2726-04c7-41c4-ab5c-5eb64062c107" containerID="a0018b9a7c37d04e50ff9c508a56d030a8884f002011c0f56fdabde52e8561c5" exitCode=0 Dec 10 06:53:54 crc kubenswrapper[4765]: I1210 06:53:54.246771 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grp4g" event={"ID":"2b5f2726-04c7-41c4-ab5c-5eb64062c107","Type":"ContainerDied","Data":"a0018b9a7c37d04e50ff9c508a56d030a8884f002011c0f56fdabde52e8561c5"} Dec 10 06:53:54 crc kubenswrapper[4765]: I1210 06:53:54.249955 4765 generic.go:334] "Generic (PLEG): container finished" podID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerID="665bfcdd64158189627806224a38b9a6366ea55ebc5ac6f6265522634ab3c811" exitCode=0 Dec 10 06:53:54 crc kubenswrapper[4765]: I1210 06:53:54.249991 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x65d6" event={"ID":"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c","Type":"ContainerDied","Data":"665bfcdd64158189627806224a38b9a6366ea55ebc5ac6f6265522634ab3c811"} Dec 10 06:53:55 crc kubenswrapper[4765]: I1210 06:53:55.256612 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grp4g" event={"ID":"2b5f2726-04c7-41c4-ab5c-5eb64062c107","Type":"ContainerStarted","Data":"20646b1f485d66103004f148e53b2865d8ff33ae31cce9633abe94b2d2f298b1"} Dec 10 06:53:55 crc kubenswrapper[4765]: I1210 06:53:55.258449 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x65d6" event={"ID":"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c","Type":"ContainerStarted","Data":"20906491490e7d6069a59255734e7cf3e4794d42c8ff7f117dcfe053893c5a73"} Dec 10 06:53:55 crc kubenswrapper[4765]: I1210 06:53:55.272642 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-grp4g" podStartSLOduration=2.472916066 podStartE2EDuration="5.272625641s" podCreationTimestamp="2025-12-10 06:53:50 +0000 UTC" firstStartedPulling="2025-12-10 06:53:52.230114417 +0000 UTC m=+351.956779733" lastFinishedPulling="2025-12-10 06:53:55.029823992 +0000 UTC m=+354.756489308" observedRunningTime="2025-12-10 06:53:55.270802879 +0000 UTC m=+354.997468185" watchObservedRunningTime="2025-12-10 06:53:55.272625641 +0000 UTC m=+354.999290957" Dec 10 06:53:55 crc kubenswrapper[4765]: I1210 06:53:55.291513 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x65d6" podStartSLOduration=2.522200583 podStartE2EDuration="5.29149514s" podCreationTimestamp="2025-12-10 06:53:50 +0000 UTC" firstStartedPulling="2025-12-10 06:53:52.229766227 +0000 UTC m=+351.956431543" lastFinishedPulling="2025-12-10 06:53:54.999060784 +0000 UTC m=+354.725726100" observedRunningTime="2025-12-10 06:53:55.288614437 +0000 UTC m=+355.015279753" watchObservedRunningTime="2025-12-10 06:53:55.29149514 +0000 UTC m=+355.018160446" Dec 10 06:54:01 
crc kubenswrapper[4765]: I1210 06:54:01.113979 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:54:01 crc kubenswrapper[4765]: I1210 06:54:01.115111 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:54:01 crc kubenswrapper[4765]: I1210 06:54:01.158597 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:54:01 crc kubenswrapper[4765]: I1210 06:54:01.312839 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:54:01 crc kubenswrapper[4765]: I1210 06:54:01.312915 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:54:01 crc kubenswrapper[4765]: I1210 06:54:01.326678 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x65d6" Dec 10 06:54:01 crc kubenswrapper[4765]: I1210 06:54:01.366229 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:54:01 crc kubenswrapper[4765]: I1210 06:54:01.984741 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-khqgp"] Dec 10 06:54:01 crc kubenswrapper[4765]: I1210 06:54:01.985826 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.001644 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-khqgp"] Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.124214 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n984\" (UniqueName: \"kubernetes.io/projected/3c5d628e-14a2-41d3-9d0b-b13465015fac-kube-api-access-5n984\") pod \"certified-operators-khqgp\" (UID: \"3c5d628e-14a2-41d3-9d0b-b13465015fac\") " pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.124309 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c5d628e-14a2-41d3-9d0b-b13465015fac-catalog-content\") pod \"certified-operators-khqgp\" (UID: \"3c5d628e-14a2-41d3-9d0b-b13465015fac\") " pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.124357 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c5d628e-14a2-41d3-9d0b-b13465015fac-utilities\") pod \"certified-operators-khqgp\" (UID: \"3c5d628e-14a2-41d3-9d0b-b13465015fac\") " pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.224879 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c5d628e-14a2-41d3-9d0b-b13465015fac-catalog-content\") pod \"certified-operators-khqgp\" (UID: \"3c5d628e-14a2-41d3-9d0b-b13465015fac\") " pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 
06:54:02.224940 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c5d628e-14a2-41d3-9d0b-b13465015fac-utilities\") pod \"certified-operators-khqgp\" (UID: \"3c5d628e-14a2-41d3-9d0b-b13465015fac\") " pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.224978 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n984\" (UniqueName: \"kubernetes.io/projected/3c5d628e-14a2-41d3-9d0b-b13465015fac-kube-api-access-5n984\") pod \"certified-operators-khqgp\" (UID: \"3c5d628e-14a2-41d3-9d0b-b13465015fac\") " pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.225863 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c5d628e-14a2-41d3-9d0b-b13465015fac-catalog-content\") pod \"certified-operators-khqgp\" (UID: \"3c5d628e-14a2-41d3-9d0b-b13465015fac\") " pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.225893 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c5d628e-14a2-41d3-9d0b-b13465015fac-utilities\") pod \"certified-operators-khqgp\" (UID: \"3c5d628e-14a2-41d3-9d0b-b13465015fac\") " pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.243040 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n984\" (UniqueName: \"kubernetes.io/projected/3c5d628e-14a2-41d3-9d0b-b13465015fac-kube-api-access-5n984\") pod \"certified-operators-khqgp\" (UID: \"3c5d628e-14a2-41d3-9d0b-b13465015fac\") " pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.303793 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.334321 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-grp4g" Dec 10 06:54:02 crc kubenswrapper[4765]: I1210 06:54:02.728167 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-khqgp"] Dec 10 06:54:03 crc kubenswrapper[4765]: I1210 06:54:03.301021 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khqgp" event={"ID":"3c5d628e-14a2-41d3-9d0b-b13465015fac","Type":"ContainerStarted","Data":"d281f91fc5cbb14176d2732d2d3a3c1574376d30f049a5b4221760ec532825ce"} Dec 10 06:54:04 crc kubenswrapper[4765]: I1210 06:54:04.049176 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 06:54:04 crc kubenswrapper[4765]: I1210 06:54:04.049260 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 06:54:05 crc kubenswrapper[4765]: I1210 06:54:05.312326 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khqgp" event={"ID":"3c5d628e-14a2-41d3-9d0b-b13465015fac","Type":"ContainerStarted","Data":"80b3b28a40480b596e0999a4a84467211114be4b4e17888339e295b403fffae3"} Dec 10 06:54:06 crc kubenswrapper[4765]: I1210 06:54:06.318428 4765 generic.go:334] "Generic (PLEG): container finished" podID="3c5d628e-14a2-41d3-9d0b-b13465015fac" containerID="80b3b28a40480b596e0999a4a84467211114be4b4e17888339e295b403fffae3" exitCode=0 Dec 10 06:54:06 crc kubenswrapper[4765]: I1210 06:54:06.318476 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khqgp" event={"ID":"3c5d628e-14a2-41d3-9d0b-b13465015fac","Type":"ContainerDied","Data":"80b3b28a40480b596e0999a4a84467211114be4b4e17888339e295b403fffae3"} Dec 10 06:54:07 crc kubenswrapper[4765]: I1210 06:54:07.325273 4765 generic.go:334] "Generic (PLEG): container finished" podID="3c5d628e-14a2-41d3-9d0b-b13465015fac" containerID="7b55a309d88f8ea774cfad67252adeb8b37d57bf9ab9d1f7eff579526e520ab3" exitCode=0 Dec 10 06:54:07 crc kubenswrapper[4765]: I1210 06:54:07.325435 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khqgp" event={"ID":"3c5d628e-14a2-41d3-9d0b-b13465015fac","Type":"ContainerDied","Data":"7b55a309d88f8ea774cfad67252adeb8b37d57bf9ab9d1f7eff579526e520ab3"} Dec 10 06:54:08 crc kubenswrapper[4765]: I1210 06:54:08.331787 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-khqgp" event={"ID":"3c5d628e-14a2-41d3-9d0b-b13465015fac","Type":"ContainerStarted","Data":"e3f3a8c41b7d1b589e6a371e1dcf1d4f192751feabd734f8a6c6b89be43a50c3"} Dec 10 06:54:08 crc kubenswrapper[4765]: I1210 06:54:08.346729 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-khqgp" podStartSLOduration=5.593990425 
podStartE2EDuration="7.346711868s" podCreationTimestamp="2025-12-10 06:54:01 +0000 UTC" firstStartedPulling="2025-12-10 06:54:06.319921073 +0000 UTC m=+366.046586389" lastFinishedPulling="2025-12-10 06:54:08.072642516 +0000 UTC m=+367.799307832" observedRunningTime="2025-12-10 06:54:08.345424191 +0000 UTC m=+368.072089517" watchObservedRunningTime="2025-12-10 06:54:08.346711868 +0000 UTC m=+368.073377184" Dec 10 06:54:12 crc kubenswrapper[4765]: I1210 06:54:12.304034 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:12 crc kubenswrapper[4765]: I1210 06:54:12.304141 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:12 crc kubenswrapper[4765]: I1210 06:54:12.343605 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.333200 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-p6s2f"] Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.348339 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.358442 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-p6s2f"] Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.513177 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1d3be27b-32c4-4d59-80c9-ef75482bc20f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.513233 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1d3be27b-32c4-4d59-80c9-ef75482bc20f-registry-tls\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.513250 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1d3be27b-32c4-4d59-80c9-ef75482bc20f-bound-sa-token\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.513284 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz7pl\" (UniqueName: \"kubernetes.io/projected/1d3be27b-32c4-4d59-80c9-ef75482bc20f-kube-api-access-fz7pl\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.513304 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/1d3be27b-32c4-4d59-80c9-ef75482bc20f-registry-certificates\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.513322 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d3be27b-32c4-4d59-80c9-ef75482bc20f-trusted-ca\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.513385 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.513837 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1d3be27b-32c4-4d59-80c9-ef75482bc20f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.537953 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.615263 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz7pl\" (UniqueName: \"kubernetes.io/projected/1d3be27b-32c4-4d59-80c9-ef75482bc20f-kube-api-access-fz7pl\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.615306 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1d3be27b-32c4-4d59-80c9-ef75482bc20f-registry-certificates\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.615325 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d3be27b-32c4-4d59-80c9-ef75482bc20f-trusted-ca\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.615350 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1d3be27b-32c4-4d59-80c9-ef75482bc20f-installation-pull-secrets\") pod 
\"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.615401 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1d3be27b-32c4-4d59-80c9-ef75482bc20f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.615434 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1d3be27b-32c4-4d59-80c9-ef75482bc20f-registry-tls\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.615449 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1d3be27b-32c4-4d59-80c9-ef75482bc20f-bound-sa-token\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.616428 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1d3be27b-32c4-4d59-80c9-ef75482bc20f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.617319 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1d3be27b-32c4-4d59-80c9-ef75482bc20f-registry-certificates\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.617379 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d3be27b-32c4-4d59-80c9-ef75482bc20f-trusted-ca\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.628223 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1d3be27b-32c4-4d59-80c9-ef75482bc20f-registry-tls\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.628858 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1d3be27b-32c4-4d59-80c9-ef75482bc20f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.631226 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-fz7pl\" (UniqueName: \"kubernetes.io/projected/1d3be27b-32c4-4d59-80c9-ef75482bc20f-kube-api-access-fz7pl\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.634268 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1d3be27b-32c4-4d59-80c9-ef75482bc20f-bound-sa-token\") pod \"image-registry-66df7c8f76-p6s2f\" (UID: \"1d3be27b-32c4-4d59-80c9-ef75482bc20f\") " pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:17 crc kubenswrapper[4765]: I1210 06:54:17.721840 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:18 crc kubenswrapper[4765]: I1210 06:54:18.055460 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-ctwd2"] Dec 10 06:54:18 crc kubenswrapper[4765]: I1210 06:54:18.055759 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" podUID="be6aa029-45f0-48ef-8c9f-0bebe1720386" containerName="controller-manager" containerID="cri-o://5ca8cd889663014c19de20cf3b23f01051cff53feb1c055bf4944f4189063c6e" gracePeriod=30 Dec 10 06:54:18 crc kubenswrapper[4765]: I1210 06:54:18.062625 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb"] Dec 10 06:54:18 crc kubenswrapper[4765]: I1210 06:54:18.062867 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" podUID="dcca5aba-a84b-414e-a345-2304ebb500c0" containerName="route-controller-manager" containerID="cri-o://99d92218bde2c5be72d7b6256c01de3d84c001cb5d144183cd0dc3c20fa89417" gracePeriod=30 Dec 10 06:54:18 crc kubenswrapper[4765]: I1210 06:54:18.117986 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-p6s2f"] Dec 10 06:54:18 crc kubenswrapper[4765]: I1210 06:54:18.408622 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" event={"ID":"1d3be27b-32c4-4d59-80c9-ef75482bc20f","Type":"ContainerStarted","Data":"d1e9ad77ada864308efaa0d49d596cfd146cc91a7f38c25773b2b51a036d3742"} Dec 10 06:54:19 crc kubenswrapper[4765]: I1210 06:54:19.415506 4765 generic.go:334] "Generic (PLEG): container finished" podID="be6aa029-45f0-48ef-8c9f-0bebe1720386" containerID="5ca8cd889663014c19de20cf3b23f01051cff53feb1c055bf4944f4189063c6e" exitCode=0 Dec 10 06:54:19 crc kubenswrapper[4765]: I1210 06:54:19.415602 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" event={"ID":"be6aa029-45f0-48ef-8c9f-0bebe1720386","Type":"ContainerDied","Data":"5ca8cd889663014c19de20cf3b23f01051cff53feb1c055bf4944f4189063c6e"} Dec 10 06:54:19 crc kubenswrapper[4765]: I1210 06:54:19.419285 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" event={"ID":"1d3be27b-32c4-4d59-80c9-ef75482bc20f","Type":"ContainerStarted","Data":"e0ed64817ac7b74f04381f842929e5fa6e62c20b3800a64487bea66948a2f304"} Dec 10 06:54:19 crc 
kubenswrapper[4765]: I1210 06:54:19.419444 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:19 crc kubenswrapper[4765]: I1210 06:54:19.421319 4765 generic.go:334] "Generic (PLEG): container finished" podID="dcca5aba-a84b-414e-a345-2304ebb500c0" containerID="99d92218bde2c5be72d7b6256c01de3d84c001cb5d144183cd0dc3c20fa89417" exitCode=0 Dec 10 06:54:19 crc kubenswrapper[4765]: I1210 06:54:19.421352 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" event={"ID":"dcca5aba-a84b-414e-a345-2304ebb500c0","Type":"ContainerDied","Data":"99d92218bde2c5be72d7b6256c01de3d84c001cb5d144183cd0dc3c20fa89417"} Dec 10 06:54:19 crc kubenswrapper[4765]: I1210 06:54:19.440576 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" podStartSLOduration=2.440559367 podStartE2EDuration="2.440559367s" podCreationTimestamp="2025-12-10 06:54:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:54:19.435682168 +0000 UTC m=+379.162347484" watchObservedRunningTime="2025-12-10 06:54:19.440559367 +0000 UTC m=+379.167224683" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.343117 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.371423 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw"] Dec 10 06:54:20 crc kubenswrapper[4765]: E1210 06:54:20.371676 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcca5aba-a84b-414e-a345-2304ebb500c0" containerName="route-controller-manager" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.371687 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcca5aba-a84b-414e-a345-2304ebb500c0" containerName="route-controller-manager" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.371784 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcca5aba-a84b-414e-a345-2304ebb500c0" containerName="route-controller-manager" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.372192 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.384556 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw"] Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.426662 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.428428 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" event={"ID":"be6aa029-45f0-48ef-8c9f-0bebe1720386","Type":"ContainerDied","Data":"74a259299352baba4ac1554b8d6bf2bd4213337e9a32efccfebce3caf5f42e76"} Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.428499 4765 scope.go:117] "RemoveContainer" containerID="5ca8cd889663014c19de20cf3b23f01051cff53feb1c055bf4944f4189063c6e" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.428764 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5964cbcb45-ctwd2" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.430933 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.430972 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb" event={"ID":"dcca5aba-a84b-414e-a345-2304ebb500c0","Type":"ContainerDied","Data":"228608f467dc1dea30b8691d12ebb6cc5ea46552ef05c7052d98fe5df4d96f7b"} Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.451254 4765 scope.go:117] "RemoveContainer" containerID="99d92218bde2c5be72d7b6256c01de3d84c001cb5d144183cd0dc3c20fa89417" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.455897 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-client-ca\") pod \"dcca5aba-a84b-414e-a345-2304ebb500c0\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.455934 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-config\") pod \"dcca5aba-a84b-414e-a345-2304ebb500c0\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.455984 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg7cx\" (UniqueName: \"kubernetes.io/projected/dcca5aba-a84b-414e-a345-2304ebb500c0-kube-api-access-qg7cx\") pod \"dcca5aba-a84b-414e-a345-2304ebb500c0\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.456044 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcca5aba-a84b-414e-a345-2304ebb500c0-serving-cert\") pod \"dcca5aba-a84b-414e-a345-2304ebb500c0\" (UID: \"dcca5aba-a84b-414e-a345-2304ebb500c0\") " Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.457560 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-client-ca" (OuterVolumeSpecName: "client-ca") pod "dcca5aba-a84b-414e-a345-2304ebb500c0" (UID: "dcca5aba-a84b-414e-a345-2304ebb500c0"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.457659 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-config" (OuterVolumeSpecName: "config") pod "dcca5aba-a84b-414e-a345-2304ebb500c0" (UID: "dcca5aba-a84b-414e-a345-2304ebb500c0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.461257 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcca5aba-a84b-414e-a345-2304ebb500c0-kube-api-access-qg7cx" (OuterVolumeSpecName: "kube-api-access-qg7cx") pod "dcca5aba-a84b-414e-a345-2304ebb500c0" (UID: "dcca5aba-a84b-414e-a345-2304ebb500c0"). InnerVolumeSpecName "kube-api-access-qg7cx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.466184 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcca5aba-a84b-414e-a345-2304ebb500c0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "dcca5aba-a84b-414e-a345-2304ebb500c0" (UID: "dcca5aba-a84b-414e-a345-2304ebb500c0"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557321 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be6aa029-45f0-48ef-8c9f-0bebe1720386-serving-cert\") pod \"be6aa029-45f0-48ef-8c9f-0bebe1720386\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557366 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nl9gg\" (UniqueName: \"kubernetes.io/projected/be6aa029-45f0-48ef-8c9f-0bebe1720386-kube-api-access-nl9gg\") pod \"be6aa029-45f0-48ef-8c9f-0bebe1720386\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557421 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-client-ca\") pod \"be6aa029-45f0-48ef-8c9f-0bebe1720386\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557451 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-proxy-ca-bundles\") pod \"be6aa029-45f0-48ef-8c9f-0bebe1720386\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557476 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-config\") pod \"be6aa029-45f0-48ef-8c9f-0bebe1720386\" (UID: \"be6aa029-45f0-48ef-8c9f-0bebe1720386\") " Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557696 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f91e6fa-69ae-41cf-9bc6-30397e74687d-client-ca\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " 
pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557730 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f91e6fa-69ae-41cf-9bc6-30397e74687d-serving-cert\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557765 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f91e6fa-69ae-41cf-9bc6-30397e74687d-config\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557797 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq8cn\" (UniqueName: \"kubernetes.io/projected/1f91e6fa-69ae-41cf-9bc6-30397e74687d-kube-api-access-mq8cn\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557940 4765 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557962 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcca5aba-a84b-414e-a345-2304ebb500c0-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557974 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg7cx\" (UniqueName: \"kubernetes.io/projected/dcca5aba-a84b-414e-a345-2304ebb500c0-kube-api-access-qg7cx\") on node \"crc\" DevicePath \"\"" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.557986 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcca5aba-a84b-414e-a345-2304ebb500c0-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.558552 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-client-ca" (OuterVolumeSpecName: "client-ca") pod "be6aa029-45f0-48ef-8c9f-0bebe1720386" (UID: "be6aa029-45f0-48ef-8c9f-0bebe1720386"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.559064 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "be6aa029-45f0-48ef-8c9f-0bebe1720386" (UID: "be6aa029-45f0-48ef-8c9f-0bebe1720386"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.559760 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-config" (OuterVolumeSpecName: "config") pod "be6aa029-45f0-48ef-8c9f-0bebe1720386" (UID: "be6aa029-45f0-48ef-8c9f-0bebe1720386"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.560377 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be6aa029-45f0-48ef-8c9f-0bebe1720386-kube-api-access-nl9gg" (OuterVolumeSpecName: "kube-api-access-nl9gg") pod "be6aa029-45f0-48ef-8c9f-0bebe1720386" (UID: "be6aa029-45f0-48ef-8c9f-0bebe1720386"). InnerVolumeSpecName "kube-api-access-nl9gg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.560802 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be6aa029-45f0-48ef-8c9f-0bebe1720386-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "be6aa029-45f0-48ef-8c9f-0bebe1720386" (UID: "be6aa029-45f0-48ef-8c9f-0bebe1720386"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.659419 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f91e6fa-69ae-41cf-9bc6-30397e74687d-client-ca\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.659458 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f91e6fa-69ae-41cf-9bc6-30397e74687d-serving-cert\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.659480 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f91e6fa-69ae-41cf-9bc6-30397e74687d-config\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.659505 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq8cn\" (UniqueName: \"kubernetes.io/projected/1f91e6fa-69ae-41cf-9bc6-30397e74687d-kube-api-access-mq8cn\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.659551 4765 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be6aa029-45f0-48ef-8c9f-0bebe1720386-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.659561 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nl9gg\" (UniqueName: 
\"kubernetes.io/projected/be6aa029-45f0-48ef-8c9f-0bebe1720386-kube-api-access-nl9gg\") on node \"crc\" DevicePath \"\"" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.659572 4765 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.659580 4765 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.659587 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be6aa029-45f0-48ef-8c9f-0bebe1720386-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.660622 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f91e6fa-69ae-41cf-9bc6-30397e74687d-client-ca\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.661172 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f91e6fa-69ae-41cf-9bc6-30397e74687d-config\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.663778 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f91e6fa-69ae-41cf-9bc6-30397e74687d-serving-cert\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.674710 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq8cn\" (UniqueName: \"kubernetes.io/projected/1f91e6fa-69ae-41cf-9bc6-30397e74687d-kube-api-access-mq8cn\") pod \"route-controller-manager-5b495486f8-5lrhw\" (UID: \"1f91e6fa-69ae-41cf-9bc6-30397e74687d\") " pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.724767 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.750163 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-ctwd2"] Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.757609 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5964cbcb45-ctwd2"] Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.761513 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb"] Dec 10 06:54:20 crc kubenswrapper[4765]: I1210 06:54:20.764904 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5598468cdf-qcrzb"] Dec 10 06:54:21 crc kubenswrapper[4765]: I1210 06:54:21.099379 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw"] Dec 10 06:54:21 crc kubenswrapper[4765]: I1210 06:54:21.438847 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" event={"ID":"1f91e6fa-69ae-41cf-9bc6-30397e74687d","Type":"ContainerStarted","Data":"4e23a1ba7164860212f8a1dd154cdc72edf303a9ae717d2e91db40588eaa32ba"} Dec 10 06:54:21 crc kubenswrapper[4765]: I1210 06:54:21.440184 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:21 crc kubenswrapper[4765]: I1210 06:54:21.440222 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" event={"ID":"1f91e6fa-69ae-41cf-9bc6-30397e74687d","Type":"ContainerStarted","Data":"dd738ab801c43779b4865fcc75f9d1b9dec2a9d9ecc00af78ac2298a0209fbef"} Dec 10 06:54:21 crc kubenswrapper[4765]: I1210 06:54:21.461002 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" podStartSLOduration=3.4609781 podStartE2EDuration="3.4609781s" podCreationTimestamp="2025-12-10 06:54:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:54:21.457209612 +0000 UTC m=+381.183874948" watchObservedRunningTime="2025-12-10 06:54:21.4609781 +0000 UTC m=+381.187643416" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.001533 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5b495486f8-5lrhw" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.348923 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-khqgp" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.595470 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be6aa029-45f0-48ef-8c9f-0bebe1720386" path="/var/lib/kubelet/pods/be6aa029-45f0-48ef-8c9f-0bebe1720386/volumes" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.596126 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcca5aba-a84b-414e-a345-2304ebb500c0" path="/var/lib/kubelet/pods/dcca5aba-a84b-414e-a345-2304ebb500c0/volumes" Dec 10 06:54:22 crc 
kubenswrapper[4765]: I1210 06:54:22.612020 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7779c5d766-8jtc7"] Dec 10 06:54:22 crc kubenswrapper[4765]: E1210 06:54:22.612284 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be6aa029-45f0-48ef-8c9f-0bebe1720386" containerName="controller-manager" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.612295 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="be6aa029-45f0-48ef-8c9f-0bebe1720386" containerName="controller-manager" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.612401 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="be6aa029-45f0-48ef-8c9f-0bebe1720386" containerName="controller-manager" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.612741 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.614859 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.615333 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.616276 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.616484 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.616976 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.620685 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.622695 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7779c5d766-8jtc7"] Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.622939 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.785629 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbcwk\" (UniqueName: \"kubernetes.io/projected/537b64b6-74c5-4868-bf74-a1a13115d09e-kube-api-access-hbcwk\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.785679 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/537b64b6-74c5-4868-bf74-a1a13115d09e-client-ca\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.785729 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/537b64b6-74c5-4868-bf74-a1a13115d09e-config\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.785767 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/537b64b6-74c5-4868-bf74-a1a13115d09e-serving-cert\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.785797 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/537b64b6-74c5-4868-bf74-a1a13115d09e-proxy-ca-bundles\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.887515 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/537b64b6-74c5-4868-bf74-a1a13115d09e-proxy-ca-bundles\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.887615 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbcwk\" (UniqueName: \"kubernetes.io/projected/537b64b6-74c5-4868-bf74-a1a13115d09e-kube-api-access-hbcwk\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.887645 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/537b64b6-74c5-4868-bf74-a1a13115d09e-client-ca\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.887694 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/537b64b6-74c5-4868-bf74-a1a13115d09e-config\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.887729 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/537b64b6-74c5-4868-bf74-a1a13115d09e-serving-cert\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.889054 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/537b64b6-74c5-4868-bf74-a1a13115d09e-client-ca\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " 
pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.889222 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/537b64b6-74c5-4868-bf74-a1a13115d09e-proxy-ca-bundles\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.889252 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/537b64b6-74c5-4868-bf74-a1a13115d09e-config\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.893480 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/537b64b6-74c5-4868-bf74-a1a13115d09e-serving-cert\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.903162 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbcwk\" (UniqueName: \"kubernetes.io/projected/537b64b6-74c5-4868-bf74-a1a13115d09e-kube-api-access-hbcwk\") pod \"controller-manager-7779c5d766-8jtc7\" (UID: \"537b64b6-74c5-4868-bf74-a1a13115d09e\") " pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:22 crc kubenswrapper[4765]: I1210 06:54:22.930461 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:23 crc kubenswrapper[4765]: I1210 06:54:23.363375 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7779c5d766-8jtc7"] Dec 10 06:54:23 crc kubenswrapper[4765]: W1210 06:54:23.367938 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod537b64b6_74c5_4868_bf74_a1a13115d09e.slice/crio-20252cab8c1b6d1bdd7c1de103551e81a0757da1addc2248044a39e2449447ae WatchSource:0}: Error finding container 20252cab8c1b6d1bdd7c1de103551e81a0757da1addc2248044a39e2449447ae: Status 404 returned error can't find the container with id 20252cab8c1b6d1bdd7c1de103551e81a0757da1addc2248044a39e2449447ae Dec 10 06:54:23 crc kubenswrapper[4765]: I1210 06:54:23.448417 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" event={"ID":"537b64b6-74c5-4868-bf74-a1a13115d09e","Type":"ContainerStarted","Data":"20252cab8c1b6d1bdd7c1de103551e81a0757da1addc2248044a39e2449447ae"} Dec 10 06:54:24 crc kubenswrapper[4765]: I1210 06:54:24.454730 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" event={"ID":"537b64b6-74c5-4868-bf74-a1a13115d09e","Type":"ContainerStarted","Data":"7ae53ac2f37c0c2a54f69a2eb2ff3e06007fe292660a31c9fc00596a46d793d3"} Dec 10 06:54:24 crc kubenswrapper[4765]: I1210 06:54:24.455295 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:24 crc kubenswrapper[4765]: I1210 06:54:24.461910 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" Dec 10 06:54:24 crc kubenswrapper[4765]: I1210 06:54:24.477330 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7779c5d766-8jtc7" podStartSLOduration=6.477309336 podStartE2EDuration="6.477309336s" podCreationTimestamp="2025-12-10 06:54:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:54:24.474484236 +0000 UTC m=+384.201149552" watchObservedRunningTime="2025-12-10 06:54:24.477309336 +0000 UTC m=+384.203974662" Dec 10 06:54:34 crc kubenswrapper[4765]: I1210 06:54:34.049878 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 06:54:34 crc kubenswrapper[4765]: I1210 06:54:34.050507 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 06:54:37 crc kubenswrapper[4765]: I1210 06:54:37.726679 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-p6s2f" Dec 10 06:54:37 crc kubenswrapper[4765]: I1210 06:54:37.780707 4765 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wjlkl"] Dec 10 06:55:02 crc kubenswrapper[4765]: I1210 06:55:02.823728 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" podUID="83cb65fc-a542-4331-80d2-2ebccf5d2bff" containerName="registry" containerID="cri-o://7ec0818cdc0bdffab64d3d361356e88faec9ba357551a7363890c77de002dc8f" gracePeriod=30 Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.658078 4765 generic.go:334] "Generic (PLEG): container finished" podID="83cb65fc-a542-4331-80d2-2ebccf5d2bff" containerID="7ec0818cdc0bdffab64d3d361356e88faec9ba357551a7363890c77de002dc8f" exitCode=0 Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.658136 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" event={"ID":"83cb65fc-a542-4331-80d2-2ebccf5d2bff","Type":"ContainerDied","Data":"7ec0818cdc0bdffab64d3d361356e88faec9ba357551a7363890c77de002dc8f"} Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.736654 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.850387 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-tls\") pod \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.850731 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-certificates\") pod \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.850768 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qr2x\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-kube-api-access-6qr2x\") pod \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.852146 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "83cb65fc-a542-4331-80d2-2ebccf5d2bff" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.852193 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.852659 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-trusted-ca\") pod \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.852707 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/83cb65fc-a542-4331-80d2-2ebccf5d2bff-installation-pull-secrets\") pod \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.852740 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-bound-sa-token\") pod \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.852763 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/83cb65fc-a542-4331-80d2-2ebccf5d2bff-ca-trust-extracted\") pod \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\" (UID: \"83cb65fc-a542-4331-80d2-2ebccf5d2bff\") " Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.853042 4765 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.854789 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "83cb65fc-a542-4331-80d2-2ebccf5d2bff" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.857589 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "83cb65fc-a542-4331-80d2-2ebccf5d2bff" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.858143 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "83cb65fc-a542-4331-80d2-2ebccf5d2bff" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.858227 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83cb65fc-a542-4331-80d2-2ebccf5d2bff-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "83cb65fc-a542-4331-80d2-2ebccf5d2bff" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.858435 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-kube-api-access-6qr2x" (OuterVolumeSpecName: "kube-api-access-6qr2x") pod "83cb65fc-a542-4331-80d2-2ebccf5d2bff" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff"). InnerVolumeSpecName "kube-api-access-6qr2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.863153 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "83cb65fc-a542-4331-80d2-2ebccf5d2bff" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.870555 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83cb65fc-a542-4331-80d2-2ebccf5d2bff-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "83cb65fc-a542-4331-80d2-2ebccf5d2bff" (UID: "83cb65fc-a542-4331-80d2-2ebccf5d2bff"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.954742 4765 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.954786 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qr2x\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-kube-api-access-6qr2x\") on node \"crc\" DevicePath \"\"" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.954797 4765 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83cb65fc-a542-4331-80d2-2ebccf5d2bff-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.954806 4765 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/83cb65fc-a542-4331-80d2-2ebccf5d2bff-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.954814 4765 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/83cb65fc-a542-4331-80d2-2ebccf5d2bff-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 10 06:55:03 crc kubenswrapper[4765]: I1210 06:55:03.954823 4765 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/83cb65fc-a542-4331-80d2-2ebccf5d2bff-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.049546 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.049599 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.049644 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.050172 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7376f4bc9252f299181bb5fb7be283e9a1bce4b9e100c58bf80840e511f0752e"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.050234 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://7376f4bc9252f299181bb5fb7be283e9a1bce4b9e100c58bf80840e511f0752e" gracePeriod=600 Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.664295 4765 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" event={"ID":"83cb65fc-a542-4331-80d2-2ebccf5d2bff","Type":"ContainerDied","Data":"b95ac3ba41ca52876493524c9fc9f60034cd2019c68bdf525ff4d19d648dfd20"} Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.664316 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wjlkl" Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.664618 4765 scope.go:117] "RemoveContainer" containerID="7ec0818cdc0bdffab64d3d361356e88faec9ba357551a7363890c77de002dc8f" Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.668212 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="7376f4bc9252f299181bb5fb7be283e9a1bce4b9e100c58bf80840e511f0752e" exitCode=0 Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.668252 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"7376f4bc9252f299181bb5fb7be283e9a1bce4b9e100c58bf80840e511f0752e"} Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.668280 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"d5e023799d44ef5db29f950e818bd172f5ecb1b760503fe8e90e8ba03b4e7b9e"} Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.678552 4765 scope.go:117] "RemoveContainer" containerID="f429ac9aa55d604ad47d126dc454bd2e4e1bb129df9820e340e7bc1a43e1f0fe" Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.696933 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wjlkl"] Dec 10 06:55:04 crc kubenswrapper[4765]: I1210 06:55:04.701053 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wjlkl"] Dec 10 06:55:06 crc kubenswrapper[4765]: I1210 06:55:06.598953 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83cb65fc-a542-4331-80d2-2ebccf5d2bff" path="/var/lib/kubelet/pods/83cb65fc-a542-4331-80d2-2ebccf5d2bff/volumes" Dec 10 06:57:04 crc kubenswrapper[4765]: I1210 06:57:04.049952 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 06:57:04 crc kubenswrapper[4765]: I1210 06:57:04.051157 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 06:57:34 crc kubenswrapper[4765]: I1210 06:57:34.049129 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 06:57:34 crc kubenswrapper[4765]: I1210 06:57:34.049723 4765 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 06:58:04 crc kubenswrapper[4765]: I1210 06:58:04.049829 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 06:58:04 crc kubenswrapper[4765]: I1210 06:58:04.050479 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 06:58:04 crc kubenswrapper[4765]: I1210 06:58:04.050534 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 06:58:04 crc kubenswrapper[4765]: I1210 06:58:04.051158 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d5e023799d44ef5db29f950e818bd172f5ecb1b760503fe8e90e8ba03b4e7b9e"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 06:58:04 crc kubenswrapper[4765]: I1210 06:58:04.051222 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://d5e023799d44ef5db29f950e818bd172f5ecb1b760503fe8e90e8ba03b4e7b9e" gracePeriod=600 Dec 10 06:58:04 crc kubenswrapper[4765]: I1210 06:58:04.889339 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="d5e023799d44ef5db29f950e818bd172f5ecb1b760503fe8e90e8ba03b4e7b9e" exitCode=0 Dec 10 06:58:04 crc kubenswrapper[4765]: I1210 06:58:04.889401 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"d5e023799d44ef5db29f950e818bd172f5ecb1b760503fe8e90e8ba03b4e7b9e"} Dec 10 06:58:04 crc kubenswrapper[4765]: I1210 06:58:04.889682 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"c59ae59e8df6d4b5e877dd0073edd70b486d51e68aa6826855c2bd6ec77d4d30"} Dec 10 06:58:04 crc kubenswrapper[4765]: I1210 06:58:04.889743 4765 scope.go:117] "RemoveContainer" containerID="7376f4bc9252f299181bb5fb7be283e9a1bce4b9e100c58bf80840e511f0752e" Dec 10 06:58:53 crc kubenswrapper[4765]: I1210 06:58:53.675516 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5wj7r"] Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.118412 4765 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovn-controller" containerID="cri-o://594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe" gracePeriod=30 Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.118512 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="sbdb" containerID="cri-o://7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c" gracePeriod=30 Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.118543 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovn-acl-logging" containerID="cri-o://46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f" gracePeriod=30 Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.118561 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd" gracePeriod=30 Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.118529 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="kube-rbac-proxy-node" containerID="cri-o://9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176" gracePeriod=30 Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.118641 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="nbdb" containerID="cri-o://85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85" gracePeriod=30 Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.118597 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="northd" containerID="cri-o://b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230" gracePeriod=30 Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.144850 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" containerID="cri-o://ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495" gracePeriod=30 Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.374541 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/3.log" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.377191 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovn-acl-logging/0.log" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.377849 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovn-controller/0.log" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.378298 
4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387365 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-etc-openvswitch\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387402 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-openvswitch\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387424 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovn-node-metrics-cert\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387443 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-env-overrides\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387461 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-var-lib-openvswitch\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387468 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387512 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387516 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387473 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-bin\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387561 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-systemd\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387584 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-slash\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387669 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-node-log\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387687 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-ovn-kubernetes\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387598 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387631 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-slash" (OuterVolumeSpecName: "host-slash") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387742 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-node-log" (OuterVolumeSpecName: "node-log") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387768 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-netd\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387808 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387829 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387814 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.387835 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-config\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388075 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388181 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-systemd-units\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388202 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388256 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-script-lib\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388314 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-ovn\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388343 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsdwg\" (UniqueName: \"kubernetes.io/projected/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-kube-api-access-lsdwg\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388368 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-netns\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388392 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-kubelet\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388412 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388433 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-log-socket\") pod \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\" (UID: \"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3\") " Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388608 4765 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388623 4765 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388635 4765 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388647 4765 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388658 4765 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388668 4765 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-slash\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388679 4765 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-node-log\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388690 4765 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388701 4765 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388712 4765 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388722 4765 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.388750 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-log-socket" (OuterVolumeSpecName: "log-socket") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.389047 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.389077 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.389707 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.389739 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.389765 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.394011 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-kube-api-access-lsdwg" (OuterVolumeSpecName: "kube-api-access-lsdwg") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "kube-api-access-lsdwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.394021 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.401307 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" (UID: "8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.427516 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rrd5r"] Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.428013 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="kube-rbac-proxy-node" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.428096 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="kube-rbac-proxy-node" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.428157 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="nbdb" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.428204 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="nbdb" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.428255 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.428299 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.428351 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.428406 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.428457 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83cb65fc-a542-4331-80d2-2ebccf5d2bff" containerName="registry" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.428514 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="83cb65fc-a542-4331-80d2-2ebccf5d2bff" containerName="registry" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.428566 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovn-acl-logging" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.428617 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovn-acl-logging" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.428665 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovn-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.428715 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovn-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.428766 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="kube-rbac-proxy-ovn-metrics" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.428816 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="kube-rbac-proxy-ovn-metrics" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.428867 4765 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.428916 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.428968 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="northd" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429030 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="northd" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.429091 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="kubecfg-setup" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429148 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="kubecfg-setup" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.429198 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="sbdb" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429245 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="sbdb" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429377 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="northd" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429434 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429485 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429535 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="83cb65fc-a542-4331-80d2-2ebccf5d2bff" containerName="registry" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429585 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="kube-rbac-proxy-ovn-metrics" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429633 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429704 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="kube-rbac-proxy-node" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429755 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovn-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429812 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovn-acl-logging" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429859 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="nbdb" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.429906 4765 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="sbdb" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.430078 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.430158 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.430292 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.430354 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: E1210 06:58:54.430486 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.430540 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerName="ovnkube-controller" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.432187 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490103 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-etc-openvswitch\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490160 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-cni-netd\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490220 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-node-log\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490255 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/286a2702-d1be-46bc-9313-81bef0618ff9-env-overrides\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490279 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/286a2702-d1be-46bc-9313-81bef0618ff9-ovnkube-script-lib\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc 
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490310 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490341 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4rqp\" (UniqueName: \"kubernetes.io/projected/286a2702-d1be-46bc-9313-81bef0618ff9-kube-api-access-f4rqp\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490390 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-run-systemd\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490423 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-var-lib-openvswitch\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490449 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-log-socket\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490473 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-cni-bin\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490496 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/286a2702-d1be-46bc-9313-81bef0618ff9-ovn-node-metrics-cert\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490513 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/286a2702-d1be-46bc-9313-81bef0618ff9-ovnkube-config\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490562 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-run-ovn\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490583 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-run-netns\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490608 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-slash\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490625 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-systemd-units\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490641 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-kubelet\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490657 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-run-ovn-kubernetes\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490675 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-run-openvswitch\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r"
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490812 4765 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490843 4765 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490852 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsdwg\" (UniqueName: \"kubernetes.io/projected/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-kube-api-access-lsdwg\") on node \"crc\" DevicePath \"\""
\"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490872 4765 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490880 4765 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490888 4765 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-log-socket\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490896 4765 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.490904 4765 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592594 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-etc-openvswitch\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592656 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-cni-netd\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592683 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-node-log\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592708 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/286a2702-d1be-46bc-9313-81bef0618ff9-env-overrides\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592769 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/286a2702-d1be-46bc-9313-81bef0618ff9-ovnkube-script-lib\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592805 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"node-log\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-node-log\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592842 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592766 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-cni-netd\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592792 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592910 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4rqp\" (UniqueName: \"kubernetes.io/projected/286a2702-d1be-46bc-9313-81bef0618ff9-kube-api-access-f4rqp\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.592764 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-etc-openvswitch\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593118 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-run-systemd\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593325 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-run-systemd\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593374 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-var-lib-openvswitch\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593398 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" 
(UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-log-socket\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593454 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-var-lib-openvswitch\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593515 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-cni-bin\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593535 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-log-socket\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593566 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/286a2702-d1be-46bc-9313-81bef0618ff9-ovn-node-metrics-cert\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593589 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/286a2702-d1be-46bc-9313-81bef0618ff9-ovnkube-config\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594140 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-run-ovn\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594163 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-run-netns\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594187 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-slash\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593610 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-cni-bin\") pod \"ovnkube-node-rrd5r\" (UID: 
\"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594209 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-run-ovn\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594224 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-systemd-units\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594205 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-systemd-units\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594252 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-kubelet\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594252 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-slash\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594261 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-run-netns\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593963 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/286a2702-d1be-46bc-9313-81bef0618ff9-ovnkube-script-lib\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594291 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-kubelet\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.593691 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/286a2702-d1be-46bc-9313-81bef0618ff9-env-overrides\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594286 4765 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-run-ovn-kubernetes\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594301 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/286a2702-d1be-46bc-9313-81bef0618ff9-ovnkube-config\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594269 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-host-run-ovn-kubernetes\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594387 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-run-openvswitch\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.594450 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/286a2702-d1be-46bc-9313-81bef0618ff9-run-openvswitch\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.596799 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/286a2702-d1be-46bc-9313-81bef0618ff9-ovn-node-metrics-cert\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.611112 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4rqp\" (UniqueName: \"kubernetes.io/projected/286a2702-d1be-46bc-9313-81bef0618ff9-kube-api-access-f4rqp\") pod \"ovnkube-node-rrd5r\" (UID: \"286a2702-d1be-46bc-9313-81bef0618ff9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:54 crc kubenswrapper[4765]: I1210 06:58:54.751371 4765 util.go:30] "No sandbox for pod can be found. 
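
[annotation] Note that the SetUp timestamps above are not monotonic (for example .592766 is logged after .592910): the operation executor runs each volume's mount as its own goroutine, so completion messages interleave. A toy Go sketch of that pattern, with invented names, purely to illustrate why the ordering looks scrambled; it is not kubelet's actual code:

    package main

    import (
    	"fmt"
    	"math/rand"
    	"sync"
    	"time"
    )

    func main() {
    	volumes := []string{"etc-openvswitch", "host-cni-netd", "node-log", "env-overrides"}
    	var wg sync.WaitGroup
    	for _, v := range volumes {
    		wg.Add(1)
    		go func(v string) { // one operation per volume, operationExecutor-style
    			defer wg.Done()
    			fmt.Printf("MountVolume started for %q\n", v)
    			time.Sleep(time.Duration(rand.Intn(20)) * time.Millisecond)
    			fmt.Printf("MountVolume.SetUp succeeded for %q\n", v) // order varies per run
    		}(v)
    	}
    	wg.Wait()
    }
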
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.122930 4765 generic.go:334] "Generic (PLEG): container finished" podID="286a2702-d1be-46bc-9313-81bef0618ff9" containerID="e5a1d615cd459afafddf85145e799a54a0c3a8e317aa468bee9372ec22feacb4" exitCode=0 Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.123005 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerDied","Data":"e5a1d615cd459afafddf85145e799a54a0c3a8e317aa468bee9372ec22feacb4"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.123306 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerStarted","Data":"7054b7b9285cfa46eee7ec7930c4587d36b3f12f490323238035329dafe9d02c"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.125238 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r78vd_d5227381-9852-49ce-96f1-220c42aab12a/kube-multus/2.log" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.125807 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r78vd_d5227381-9852-49ce-96f1-220c42aab12a/kube-multus/1.log" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.125851 4765 generic.go:334] "Generic (PLEG): container finished" podID="d5227381-9852-49ce-96f1-220c42aab12a" containerID="b68f4e569b7eeef5928122abf99162669328f71c395b2c29915061eb90e372b3" exitCode=2 Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.125887 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r78vd" event={"ID":"d5227381-9852-49ce-96f1-220c42aab12a","Type":"ContainerDied","Data":"b68f4e569b7eeef5928122abf99162669328f71c395b2c29915061eb90e372b3"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.125937 4765 scope.go:117] "RemoveContainer" containerID="1bd98078a25ccee3c5a9a8905539343e090e5867d0c53c6a6966075c2d0cc6ae" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.126753 4765 scope.go:117] "RemoveContainer" containerID="b68f4e569b7eeef5928122abf99162669328f71c395b2c29915061eb90e372b3" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.127050 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-r78vd_openshift-multus(d5227381-9852-49ce-96f1-220c42aab12a)\"" pod="openshift-multus/multus-r78vd" podUID="d5227381-9852-49ce-96f1-220c42aab12a" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.130357 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovnkube-controller/3.log" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.132858 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovn-acl-logging/0.log" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134216 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5wj7r_8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/ovn-controller/0.log" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134842 4765 generic.go:334] "Generic (PLEG): container finished" 
podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495" exitCode=0 Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134870 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c" exitCode=0 Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134879 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85" exitCode=0 Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134891 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230" exitCode=0 Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134897 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd" exitCode=0 Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134904 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176" exitCode=0 Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134910 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f" exitCode=143 Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134918 4765 generic.go:334] "Generic (PLEG): container finished" podID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" containerID="594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe" exitCode=143 Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134927 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.134914 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135234 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135257 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135272 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135285 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135300 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135314 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135329 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135337 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135345 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135353 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135360 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135370 4765 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135377 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135385 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135392 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135402 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135414 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135423 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135430 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135438 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135446 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135454 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135467 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135475 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135483 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135490 4765 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135501 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135514 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135524 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135532 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135539 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135546 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135553 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135561 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135568 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135575 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135582 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135592 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5wj7r" event={"ID":"8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3","Type":"ContainerDied","Data":"45fe4e94f0afb580ee340cea8197c0bdaa468cfec55a6e2291201bd627e0d98e"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135602 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495"} 
Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135611 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135618 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135628 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135636 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135643 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135650 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135657 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135665 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.135671 4765 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d"} Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.161918 4765 scope.go:117] "RemoveContainer" containerID="ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.203188 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5wj7r"] Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.205897 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5wj7r"] Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.210192 4765 scope.go:117] "RemoveContainer" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.228488 4765 scope.go:117] "RemoveContainer" containerID="7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.262020 4765 scope.go:117] "RemoveContainer" containerID="85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.278401 4765 scope.go:117] "RemoveContainer" containerID="b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.292933 4765 
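
[annotation] The RemoveContainer retries below fail with gRPC NotFound and matching "DeleteContainer returned error" records: the containers were already removed, so the status lookup races with CRI-O's own cleanup and the errors are benign. A common way to make such deletes idempotent (a sketch of the general pattern, not kubelet's implementation) is to map NotFound to success:

    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // ignoreNotFound converts "already gone" into success for idempotent deletes.
    func ignoreNotFound(err error) error {
    	if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
    		return nil // container no longer exists: removal goal is met
    	}
    	return err
    }

    func main() {
    	err := status.Error(codes.NotFound, "could not find container")
    	fmt.Println("after ignoreNotFound:", ignoreNotFound(err)) // prints <nil>
    }
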
scope.go:117] "RemoveContainer" containerID="28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.317203 4765 scope.go:117] "RemoveContainer" containerID="9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.330022 4765 scope.go:117] "RemoveContainer" containerID="46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.366912 4765 scope.go:117] "RemoveContainer" containerID="594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.384067 4765 scope.go:117] "RemoveContainer" containerID="575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.409421 4765 scope.go:117] "RemoveContainer" containerID="ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.410895 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495\": container with ID starting with ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495 not found: ID does not exist" containerID="ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.410931 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495"} err="failed to get container status \"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495\": rpc error: code = NotFound desc = could not find container \"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495\": container with ID starting with ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495 not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.410961 4765 scope.go:117] "RemoveContainer" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.411469 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\": container with ID starting with e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839 not found: ID does not exist" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.411523 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839"} err="failed to get container status \"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\": rpc error: code = NotFound desc = could not find container \"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\": container with ID starting with e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839 not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.411554 4765 scope.go:117] "RemoveContainer" containerID="7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.411968 4765 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\": container with ID starting with 7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c not found: ID does not exist" containerID="7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.412076 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c"} err="failed to get container status \"7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\": rpc error: code = NotFound desc = could not find container \"7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c\": container with ID starting with 7d6f24cff0740ca40d6bbc4105dffcca63ed0b73c963e942b41bbcfef5b1971c not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.412160 4765 scope.go:117] "RemoveContainer" containerID="85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.412521 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\": container with ID starting with 85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85 not found: ID does not exist" containerID="85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.412547 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85"} err="failed to get container status \"85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\": rpc error: code = NotFound desc = could not find container \"85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85\": container with ID starting with 85b2878122a3c2339abe82afac9d8de0c959bd0607e6b93d78457330f2af4f85 not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.412564 4765 scope.go:117] "RemoveContainer" containerID="b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.412923 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\": container with ID starting with b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230 not found: ID does not exist" containerID="b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.413053 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230"} err="failed to get container status \"b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\": rpc error: code = NotFound desc = could not find container \"b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230\": container with ID starting with b611dadfef5e4d56035bbea460fb4c4937ec3d79bdefe387e4a46d8699ee3230 not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.414963 4765 scope.go:117] 
"RemoveContainer" containerID="28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.415708 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\": container with ID starting with 28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd not found: ID does not exist" containerID="28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.415749 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd"} err="failed to get container status \"28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\": rpc error: code = NotFound desc = could not find container \"28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd\": container with ID starting with 28b85dc6a7d7292d30cecfecf4e9d076bf0a7caa0defbc347f5764ac778ac8cd not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.415781 4765 scope.go:117] "RemoveContainer" containerID="9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.416156 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\": container with ID starting with 9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176 not found: ID does not exist" containerID="9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.416189 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176"} err="failed to get container status \"9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\": rpc error: code = NotFound desc = could not find container \"9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176\": container with ID starting with 9d805bd1229efca37d3b934a2de8447ecbbea8401718f34ca3b5f8770ec39176 not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.416210 4765 scope.go:117] "RemoveContainer" containerID="46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.416613 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\": container with ID starting with 46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f not found: ID does not exist" containerID="46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.416712 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f"} err="failed to get container status \"46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\": rpc error: code = NotFound desc = could not find container \"46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f\": container with ID starting with 
46f23b6f671f74be37ef85e4af3d5ede6b6604e41f5ffd7d83d72ce88c46ba5f not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.416807 4765 scope.go:117] "RemoveContainer" containerID="594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.417305 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\": container with ID starting with 594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe not found: ID does not exist" containerID="594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.417336 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe"} err="failed to get container status \"594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\": rpc error: code = NotFound desc = could not find container \"594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe\": container with ID starting with 594fb49083f44b012e1a4e6219014ad73f502c4d6314fb14583e4c33be6106fe not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.417355 4765 scope.go:117] "RemoveContainer" containerID="575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d" Dec 10 06:58:55 crc kubenswrapper[4765]: E1210 06:58:55.417625 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\": container with ID starting with 575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d not found: ID does not exist" containerID="575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.417729 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d"} err="failed to get container status \"575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\": rpc error: code = NotFound desc = could not find container \"575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d\": container with ID starting with 575e36c9e65d5188192a43bd947c6d7be2483c9c2cbae7a7ee53b1cf7d0d2f1d not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.417825 4765 scope.go:117] "RemoveContainer" containerID="ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.418187 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495"} err="failed to get container status \"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495\": rpc error: code = NotFound desc = could not find container \"ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495\": container with ID starting with ccabd51a80a2730bcf2a6fefd713529e7829c74eae2a2c050bfc143b1a362495 not found: ID does not exist" Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.418315 4765 scope.go:117] "RemoveContainer" containerID="e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839" Dec 10 06:58:55 crc 
Dec 10 06:58:55 crc kubenswrapper[4765]: I1210 06:58:55.418765 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839"} err="failed to get container status \"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\": rpc error: code = NotFound desc = could not find container \"e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839\": container with ID starting with e0a4bb43623b6336e82664042e771350122424e014c908f0ec1f938d90ea2839 not found: ID does not exist"
Dec 10 06:58:56 crc kubenswrapper[4765]: I1210 06:58:56.147449 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerStarted","Data":"454520280c71302f51a911358ce755360bcf1a9f263a8454465c8cfb34d456b6"}
Dec 10 06:58:56 crc kubenswrapper[4765]: I1210 06:58:56.147493 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerStarted","Data":"52e2533bfa21a5251a4b85eff768a8764f931062dbd742cf045e705a59095051"}
Dec 10 06:58:56 crc kubenswrapper[4765]: I1210 06:58:56.147506 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerStarted","Data":"3c9f63bed58926814dda6057e95830d43d6352afd01ed659d9480ee5595ae179"}
Dec 10 06:58:56 crc kubenswrapper[4765]: I1210 06:58:56.147515 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerStarted","Data":"7a142df11e456cae8ade4045ab8229f23a6e34aadeb3b97df98c9565c540080a"}
Dec 10 06:58:56 crc kubenswrapper[4765]: I1210 06:58:56.147525 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerStarted","Data":"95970c1dd9d2e22d9f2b862c9c1de5a39d3eca49c92c4c572daba700d76dc91a"}
Dec 10 06:58:56 crc kubenswrapper[4765]: I1210 06:58:56.147536 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerStarted","Data":"069fb2473cd513b2c3a76e96930ef41ba704c50e49217ab029fb689ff07e4b99"}
Dec 10 06:58:56 crc kubenswrapper[4765]: I1210 06:58:56.149208 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r78vd_d5227381-9852-49ce-96f1-220c42aab12a/kube-multus/2.log"
Dec 10 06:58:56 crc kubenswrapper[4765]: I1210 06:58:56.597458 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3" path="/var/lib/kubelet/pods/8ba0a5a0-6a96-4829-b5c0-6fe8310ab2e3/volumes"
Dec 10 06:58:58 crc kubenswrapper[4765]: I1210 06:58:58.162118 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerStarted","Data":"af4355f3fbbc1af7a6d82b3f3b282ba2693bee917cc0e44b31e93af4aaf1b17e"}
event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerStarted","Data":"af4355f3fbbc1af7a6d82b3f3b282ba2693bee917cc0e44b31e93af4aaf1b17e"} Dec 10 06:59:01 crc kubenswrapper[4765]: I1210 06:59:01.181246 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" event={"ID":"286a2702-d1be-46bc-9313-81bef0618ff9","Type":"ContainerStarted","Data":"676b6b940ab645fd66e9d37500a93cde651bb587e57d9cdc14998ea73b50dfe6"} Dec 10 06:59:01 crc kubenswrapper[4765]: I1210 06:59:01.181994 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:59:01 crc kubenswrapper[4765]: I1210 06:59:01.182008 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:59:01 crc kubenswrapper[4765]: I1210 06:59:01.210970 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:59:01 crc kubenswrapper[4765]: I1210 06:59:01.213177 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" podStartSLOduration=7.213167392 podStartE2EDuration="7.213167392s" podCreationTimestamp="2025-12-10 06:58:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 06:59:01.210873307 +0000 UTC m=+660.937538623" watchObservedRunningTime="2025-12-10 06:59:01.213167392 +0000 UTC m=+660.939832708" Dec 10 06:59:02 crc kubenswrapper[4765]: I1210 06:59:02.186006 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:59:02 crc kubenswrapper[4765]: I1210 06:59:02.212301 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.778374 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-5ccw4"] Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.779208 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.783745 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.783860 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.784563 4765 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-2xn5f" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.784660 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.789540 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5ccw4"] Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.818530 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-crc-storage\") pod \"crc-storage-crc-5ccw4\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.818603 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-node-mnt\") pod \"crc-storage-crc-5ccw4\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.818629 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvgsn\" (UniqueName: \"kubernetes.io/projected/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-kube-api-access-fvgsn\") pod \"crc-storage-crc-5ccw4\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.919608 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-crc-storage\") pod \"crc-storage-crc-5ccw4\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.919718 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-node-mnt\") pod \"crc-storage-crc-5ccw4\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.919744 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvgsn\" (UniqueName: \"kubernetes.io/projected/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-kube-api-access-fvgsn\") pod \"crc-storage-crc-5ccw4\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.920286 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-node-mnt\") pod \"crc-storage-crc-5ccw4\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " 
pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.920449 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-crc-storage\") pod \"crc-storage-crc-5ccw4\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:03 crc kubenswrapper[4765]: I1210 06:59:03.940687 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvgsn\" (UniqueName: \"kubernetes.io/projected/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-kube-api-access-fvgsn\") pod \"crc-storage-crc-5ccw4\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:04 crc kubenswrapper[4765]: I1210 06:59:04.103571 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:04 crc kubenswrapper[4765]: E1210 06:59:04.139378 4765 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(9b1c66d833d86d0468ebfc653b04033d08f8defb9d5680c187d0fa9aa3d3e9b5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 10 06:59:04 crc kubenswrapper[4765]: E1210 06:59:04.139462 4765 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(9b1c66d833d86d0468ebfc653b04033d08f8defb9d5680c187d0fa9aa3d3e9b5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:04 crc kubenswrapper[4765]: E1210 06:59:04.139488 4765 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(9b1c66d833d86d0468ebfc653b04033d08f8defb9d5680c187d0fa9aa3d3e9b5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:04 crc kubenswrapper[4765]: E1210 06:59:04.139542 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-5ccw4_crc-storage(0650ddc2-3ecc-49d5-bd29-54455ff56ebc)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-5ccw4_crc-storage(0650ddc2-3ecc-49d5-bd29-54455ff56ebc)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(9b1c66d833d86d0468ebfc653b04033d08f8defb9d5680c187d0fa9aa3d3e9b5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-5ccw4" podUID="0650ddc2-3ecc-49d5-bd29-54455ff56ebc" Dec 10 06:59:04 crc kubenswrapper[4765]: I1210 06:59:04.193572 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:04 crc kubenswrapper[4765]: I1210 06:59:04.194430 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:04 crc kubenswrapper[4765]: E1210 06:59:04.219119 4765 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(c91f4b156bad8a0921d7cd3fe222d3a811287486e11a4ce92cf6207b6e3b6192): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 10 06:59:04 crc kubenswrapper[4765]: E1210 06:59:04.219190 4765 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(c91f4b156bad8a0921d7cd3fe222d3a811287486e11a4ce92cf6207b6e3b6192): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:04 crc kubenswrapper[4765]: E1210 06:59:04.219213 4765 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(c91f4b156bad8a0921d7cd3fe222d3a811287486e11a4ce92cf6207b6e3b6192): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:04 crc kubenswrapper[4765]: E1210 06:59:04.219260 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-5ccw4_crc-storage(0650ddc2-3ecc-49d5-bd29-54455ff56ebc)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-5ccw4_crc-storage(0650ddc2-3ecc-49d5-bd29-54455ff56ebc)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(c91f4b156bad8a0921d7cd3fe222d3a811287486e11a4ce92cf6207b6e3b6192): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-5ccw4" podUID="0650ddc2-3ecc-49d5-bd29-54455ff56ebc" Dec 10 06:59:07 crc kubenswrapper[4765]: I1210 06:59:07.588868 4765 scope.go:117] "RemoveContainer" containerID="b68f4e569b7eeef5928122abf99162669328f71c395b2c29915061eb90e372b3" Dec 10 06:59:07 crc kubenswrapper[4765]: E1210 06:59:07.589340 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-r78vd_openshift-multus(d5227381-9852-49ce-96f1-220c42aab12a)\"" pod="openshift-multus/multus-r78vd" podUID="d5227381-9852-49ce-96f1-220c42aab12a" Dec 10 06:59:16 crc kubenswrapper[4765]: I1210 06:59:16.588788 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:16 crc kubenswrapper[4765]: I1210 06:59:16.589676 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:16 crc kubenswrapper[4765]: E1210 06:59:16.614152 4765 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(bb3e8b4bee25f24e515b4fbf172fd7e807f925ad3d5cfb86e78c7672e2431066): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 10 06:59:16 crc kubenswrapper[4765]: E1210 06:59:16.614519 4765 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(bb3e8b4bee25f24e515b4fbf172fd7e807f925ad3d5cfb86e78c7672e2431066): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:16 crc kubenswrapper[4765]: E1210 06:59:16.614538 4765 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(bb3e8b4bee25f24e515b4fbf172fd7e807f925ad3d5cfb86e78c7672e2431066): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:16 crc kubenswrapper[4765]: E1210 06:59:16.614585 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-5ccw4_crc-storage(0650ddc2-3ecc-49d5-bd29-54455ff56ebc)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-5ccw4_crc-storage(0650ddc2-3ecc-49d5-bd29-54455ff56ebc)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-5ccw4_crc-storage_0650ddc2-3ecc-49d5-bd29-54455ff56ebc_0(bb3e8b4bee25f24e515b4fbf172fd7e807f925ad3d5cfb86e78c7672e2431066): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-5ccw4" podUID="0650ddc2-3ecc-49d5-bd29-54455ff56ebc" Dec 10 06:59:21 crc kubenswrapper[4765]: I1210 06:59:21.589841 4765 scope.go:117] "RemoveContainer" containerID="b68f4e569b7eeef5928122abf99162669328f71c395b2c29915061eb90e372b3" Dec 10 06:59:22 crc kubenswrapper[4765]: I1210 06:59:22.290777 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-r78vd_d5227381-9852-49ce-96f1-220c42aab12a/kube-multus/2.log" Dec 10 06:59:22 crc kubenswrapper[4765]: I1210 06:59:22.291098 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-r78vd" event={"ID":"d5227381-9852-49ce-96f1-220c42aab12a","Type":"ContainerStarted","Data":"1c78ae07cb949c5be3d77d58d8d5d1b722cc88787c0ca9b61124b4426b6d79eb"} Dec 10 06:59:24 crc kubenswrapper[4765]: I1210 06:59:24.777930 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rrd5r" Dec 10 06:59:27 crc kubenswrapper[4765]: I1210 06:59:27.588588 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:27 crc kubenswrapper[4765]: I1210 06:59:27.589029 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:27 crc kubenswrapper[4765]: I1210 06:59:27.752001 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5ccw4"] Dec 10 06:59:27 crc kubenswrapper[4765]: I1210 06:59:27.760917 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 06:59:28 crc kubenswrapper[4765]: I1210 06:59:28.320280 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5ccw4" event={"ID":"0650ddc2-3ecc-49d5-bd29-54455ff56ebc","Type":"ContainerStarted","Data":"b154e1934437bb6dfddc24be06c68aa755c85d6a1cf8e1590095c263f99c094b"} Dec 10 06:59:32 crc kubenswrapper[4765]: I1210 06:59:32.340434 4765 generic.go:334] "Generic (PLEG): container finished" podID="0650ddc2-3ecc-49d5-bd29-54455ff56ebc" containerID="2e0049df642baac550027bf78f400c731b96917d76174c1af3703380175c40ba" exitCode=0 Dec 10 06:59:32 crc kubenswrapper[4765]: I1210 06:59:32.340866 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5ccw4" event={"ID":"0650ddc2-3ecc-49d5-bd29-54455ff56ebc","Type":"ContainerDied","Data":"2e0049df642baac550027bf78f400c731b96917d76174c1af3703380175c40ba"} Dec 10 06:59:33 crc kubenswrapper[4765]: I1210 06:59:33.556709 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:33 crc kubenswrapper[4765]: I1210 06:59:33.708556 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvgsn\" (UniqueName: \"kubernetes.io/projected/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-kube-api-access-fvgsn\") pod \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " Dec 10 06:59:33 crc kubenswrapper[4765]: I1210 06:59:33.708666 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-crc-storage\") pod \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " Dec 10 06:59:33 crc kubenswrapper[4765]: I1210 06:59:33.708756 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-node-mnt\") pod \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\" (UID: \"0650ddc2-3ecc-49d5-bd29-54455ff56ebc\") " Dec 10 06:59:33 crc kubenswrapper[4765]: I1210 06:59:33.708916 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "0650ddc2-3ecc-49d5-bd29-54455ff56ebc" (UID: "0650ddc2-3ecc-49d5-bd29-54455ff56ebc"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 06:59:33 crc kubenswrapper[4765]: I1210 06:59:33.709222 4765 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 10 06:59:33 crc kubenswrapper[4765]: I1210 06:59:33.715141 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-kube-api-access-fvgsn" (OuterVolumeSpecName: "kube-api-access-fvgsn") pod "0650ddc2-3ecc-49d5-bd29-54455ff56ebc" (UID: "0650ddc2-3ecc-49d5-bd29-54455ff56ebc"). 
InnerVolumeSpecName "kube-api-access-fvgsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 06:59:33 crc kubenswrapper[4765]: I1210 06:59:33.722020 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "0650ddc2-3ecc-49d5-bd29-54455ff56ebc" (UID: "0650ddc2-3ecc-49d5-bd29-54455ff56ebc"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 06:59:33 crc kubenswrapper[4765]: I1210 06:59:33.810287 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvgsn\" (UniqueName: \"kubernetes.io/projected/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-kube-api-access-fvgsn\") on node \"crc\" DevicePath \"\"" Dec 10 06:59:33 crc kubenswrapper[4765]: I1210 06:59:33.810345 4765 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0650ddc2-3ecc-49d5-bd29-54455ff56ebc-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 10 06:59:34 crc kubenswrapper[4765]: I1210 06:59:34.351382 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5ccw4" event={"ID":"0650ddc2-3ecc-49d5-bd29-54455ff56ebc","Type":"ContainerDied","Data":"b154e1934437bb6dfddc24be06c68aa755c85d6a1cf8e1590095c263f99c094b"} Dec 10 06:59:34 crc kubenswrapper[4765]: I1210 06:59:34.351417 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5ccw4" Dec 10 06:59:34 crc kubenswrapper[4765]: I1210 06:59:34.351428 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b154e1934437bb6dfddc24be06c68aa755c85d6a1cf8e1590095c263f99c094b" Dec 10 06:59:41 crc kubenswrapper[4765]: I1210 06:59:41.952947 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl"] Dec 10 06:59:41 crc kubenswrapper[4765]: E1210 06:59:41.954580 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0650ddc2-3ecc-49d5-bd29-54455ff56ebc" containerName="storage" Dec 10 06:59:41 crc kubenswrapper[4765]: I1210 06:59:41.954665 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="0650ddc2-3ecc-49d5-bd29-54455ff56ebc" containerName="storage" Dec 10 06:59:41 crc kubenswrapper[4765]: I1210 06:59:41.954841 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="0650ddc2-3ecc-49d5-bd29-54455ff56ebc" containerName="storage" Dec 10 06:59:41 crc kubenswrapper[4765]: I1210 06:59:41.955641 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:41 crc kubenswrapper[4765]: I1210 06:59:41.961608 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 10 06:59:41 crc kubenswrapper[4765]: I1210 06:59:41.966218 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl"] Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.106906 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.106977 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.107022 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mdzn\" (UniqueName: \"kubernetes.io/projected/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-kube-api-access-7mdzn\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.208231 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mdzn\" (UniqueName: \"kubernetes.io/projected/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-kube-api-access-7mdzn\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.208314 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.208380 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.208839 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.208902 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.226112 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mdzn\" (UniqueName: \"kubernetes.io/projected/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-kube-api-access-7mdzn\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.271784 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" Dec 10 06:59:42 crc kubenswrapper[4765]: I1210 06:59:42.434661 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl"] Dec 10 06:59:43 crc kubenswrapper[4765]: I1210 06:59:43.395019 4765 generic.go:334] "Generic (PLEG): container finished" podID="e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" containerID="15e226165d709091a7dc501f7bd0c80c7c0953dec4540a4bd5adc45c942c7368" exitCode=0 Dec 10 06:59:43 crc kubenswrapper[4765]: I1210 06:59:43.395107 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" event={"ID":"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5","Type":"ContainerDied","Data":"15e226165d709091a7dc501f7bd0c80c7c0953dec4540a4bd5adc45c942c7368"} Dec 10 06:59:43 crc kubenswrapper[4765]: I1210 06:59:43.395146 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" event={"ID":"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5","Type":"ContainerStarted","Data":"8f38f38f9fa987823bfb5933054e76552e61d04fc98c99a156ff3f1bc1705610"} Dec 10 06:59:46 crc kubenswrapper[4765]: I1210 06:59:46.420995 4765 generic.go:334] "Generic (PLEG): container finished" podID="e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" containerID="2acda7b31e160a0286f1bbec46532296f98f601bc16cb1df32acb3dd717f5c7d" exitCode=0 Dec 10 06:59:46 crc kubenswrapper[4765]: I1210 06:59:46.421040 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" event={"ID":"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5","Type":"ContainerDied","Data":"2acda7b31e160a0286f1bbec46532296f98f601bc16cb1df32acb3dd717f5c7d"} Dec 10 06:59:47 crc kubenswrapper[4765]: I1210 06:59:47.427616 4765 generic.go:334] "Generic (PLEG): container finished" podID="e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" containerID="254bebf2c2500f99bc7bf31e1f89119b538aa1fd5ddf20de6390e7646cfc3d0f" exitCode=0 Dec 10 06:59:47 crc kubenswrapper[4765]: I1210 
Dec 10 06:59:47 crc kubenswrapper[4765]: I1210 06:59:47.427655 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" event={"ID":"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5","Type":"ContainerDied","Data":"254bebf2c2500f99bc7bf31e1f89119b538aa1fd5ddf20de6390e7646cfc3d0f"}
Dec 10 06:59:48 crc kubenswrapper[4765]: I1210 06:59:48.653920 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl"
Dec 10 06:59:48 crc kubenswrapper[4765]: I1210 06:59:48.685712 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-bundle\") pod \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") "
Dec 10 06:59:48 crc kubenswrapper[4765]: I1210 06:59:48.685774 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mdzn\" (UniqueName: \"kubernetes.io/projected/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-kube-api-access-7mdzn\") pod \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") "
Dec 10 06:59:48 crc kubenswrapper[4765]: I1210 06:59:48.685799 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-util\") pod \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\" (UID: \"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5\") "
Dec 10 06:59:48 crc kubenswrapper[4765]: I1210 06:59:48.687280 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-bundle" (OuterVolumeSpecName: "bundle") pod "e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" (UID: "e54ad43d-17ff-4bcc-b0a2-e328c40c83f5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 06:59:48 crc kubenswrapper[4765]: I1210 06:59:48.691511 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-kube-api-access-7mdzn" (OuterVolumeSpecName: "kube-api-access-7mdzn") pod "e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" (UID: "e54ad43d-17ff-4bcc-b0a2-e328c40c83f5"). InnerVolumeSpecName "kube-api-access-7mdzn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 06:59:48 crc kubenswrapper[4765]: I1210 06:59:48.700135 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-util" (OuterVolumeSpecName: "util") pod "e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" (UID: "e54ad43d-17ff-4bcc-b0a2-e328c40c83f5"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 06:59:48 crc kubenswrapper[4765]: I1210 06:59:48.786869 4765 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 06:59:48 crc kubenswrapper[4765]: I1210 06:59:48.786902 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mdzn\" (UniqueName: \"kubernetes.io/projected/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-kube-api-access-7mdzn\") on node \"crc\" DevicePath \"\""
Dec 10 06:59:48 crc kubenswrapper[4765]: I1210 06:59:48.786913 4765 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e54ad43d-17ff-4bcc-b0a2-e328c40c83f5-util\") on node \"crc\" DevicePath \"\""
Dec 10 06:59:49 crc kubenswrapper[4765]: I1210 06:59:49.440259 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl" event={"ID":"e54ad43d-17ff-4bcc-b0a2-e328c40c83f5","Type":"ContainerDied","Data":"8f38f38f9fa987823bfb5933054e76552e61d04fc98c99a156ff3f1bc1705610"}
Dec 10 06:59:49 crc kubenswrapper[4765]: I1210 06:59:49.440307 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f38f38f9fa987823bfb5933054e76552e61d04fc98c99a156ff3f1bc1705610"
Dec 10 06:59:49 crc kubenswrapper[4765]: I1210 06:59:49.440349 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.484860 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966"]
Dec 10 06:59:53 crc kubenswrapper[4765]: E1210 06:59:53.485729 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" containerName="pull"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.485753 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" containerName="pull"
Dec 10 06:59:53 crc kubenswrapper[4765]: E1210 06:59:53.485784 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" containerName="extract"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.485796 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" containerName="extract"
Dec 10 06:59:53 crc kubenswrapper[4765]: E1210 06:59:53.485812 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" containerName="util"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.485820 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" containerName="util"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.485946 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e54ad43d-17ff-4bcc-b0a2-e328c40c83f5" containerName="extract"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.486600 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.491035 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-9qglb"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.491503 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.493719 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.499859 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966"]
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.655933 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxlwp\" (UniqueName: \"kubernetes.io/projected/fd1bc451-a918-4dce-89d6-81b1d7950663-kube-api-access-mxlwp\") pod \"nmstate-operator-5b5b58f5c8-pz966\" (UID: \"fd1bc451-a918-4dce-89d6-81b1d7950663\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.757307 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxlwp\" (UniqueName: \"kubernetes.io/projected/fd1bc451-a918-4dce-89d6-81b1d7950663-kube-api-access-mxlwp\") pod \"nmstate-operator-5b5b58f5c8-pz966\" (UID: \"fd1bc451-a918-4dce-89d6-81b1d7950663\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.776112 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxlwp\" (UniqueName: \"kubernetes.io/projected/fd1bc451-a918-4dce-89d6-81b1d7950663-kube-api-access-mxlwp\") pod \"nmstate-operator-5b5b58f5c8-pz966\" (UID: \"fd1bc451-a918-4dce-89d6-81b1d7950663\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.802652 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966"
Dec 10 06:59:53 crc kubenswrapper[4765]: I1210 06:59:53.994376 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966"]
Dec 10 06:59:54 crc kubenswrapper[4765]: I1210 06:59:54.466953 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966" event={"ID":"fd1bc451-a918-4dce-89d6-81b1d7950663","Type":"ContainerStarted","Data":"2ab6c08169dca4c377b373c6e9fa11ba2276006d4c93a88b041b1a29358f9de6"}
Dec 10 06:59:57 crc kubenswrapper[4765]: I1210 06:59:57.482901 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966" event={"ID":"fd1bc451-a918-4dce-89d6-81b1d7950663","Type":"ContainerStarted","Data":"dfa9e5d736b0df415da0cb4fe8e8b4e4b5975ee4463d074dc758fb23e1485311"}
Dec 10 06:59:57 crc kubenswrapper[4765]: I1210 06:59:57.501360 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-pz966" podStartSLOduration=1.985982138 podStartE2EDuration="4.501342822s" podCreationTimestamp="2025-12-10 06:59:53 +0000 UTC" firstStartedPulling="2025-12-10 06:59:54.007158286 +0000 UTC m=+713.733823602" lastFinishedPulling="2025-12-10 06:59:56.52251897 +0000 UTC m=+716.249184286" observedRunningTime="2025-12-10 06:59:57.496330119 +0000 UTC m=+717.222995455" watchObservedRunningTime="2025-12-10 06:59:57.501342822 +0000 UTC m=+717.228008138"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.153292 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"]
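In the podStartSLOduration entry above, the SLO duration is the end-to-end startup duration minus the image-pull window (lastFinishedPulling - firstStartedPulling): for nmstate-operator, 4.501342822s - 2.515360684s = 1.985982138s. A quick illustrative check of that arithmetic in Go:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Values taken from the log entry above.
	e2e := 4501342822 * time.Nanosecond // podStartE2EDuration = 4.501342822s
	firstPull, _ := time.Parse(time.RFC3339Nano, "2025-12-10T06:59:54.007158286Z")
	lastPull, _ := time.Parse(time.RFC3339Nano, "2025-12-10T06:59:56.52251897Z")
	// SLO duration excludes the time spent pulling images.
	slo := e2e - lastPull.Sub(firstPull)
	fmt.Println(slo) // 1.985982138s, matching podStartSLOduration
}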
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.154912 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.156873 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.156874 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.175232 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"]
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.333717 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nls5\" (UniqueName: \"kubernetes.io/projected/2666b91c-c78a-4c38-92d7-a4322fc55bd8-kube-api-access-5nls5\") pod \"collect-profiles-29422500-hwqzz\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.333774 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2666b91c-c78a-4c38-92d7-a4322fc55bd8-config-volume\") pod \"collect-profiles-29422500-hwqzz\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.333879 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2666b91c-c78a-4c38-92d7-a4322fc55bd8-secret-volume\") pod \"collect-profiles-29422500-hwqzz\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.434548 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2666b91c-c78a-4c38-92d7-a4322fc55bd8-secret-volume\") pod \"collect-profiles-29422500-hwqzz\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.434644 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nls5\" (UniqueName: \"kubernetes.io/projected/2666b91c-c78a-4c38-92d7-a4322fc55bd8-kube-api-access-5nls5\") pod \"collect-profiles-29422500-hwqzz\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.434670 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2666b91c-c78a-4c38-92d7-a4322fc55bd8-config-volume\") pod \"collect-profiles-29422500-hwqzz\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.435583 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2666b91c-c78a-4c38-92d7-a4322fc55bd8-config-volume\") pod \"collect-profiles-29422500-hwqzz\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.440318 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2666b91c-c78a-4c38-92d7-a4322fc55bd8-secret-volume\") pod \"collect-profiles-29422500-hwqzz\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.452944 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nls5\" (UniqueName: \"kubernetes.io/projected/2666b91c-c78a-4c38-92d7-a4322fc55bd8-kube-api-access-5nls5\") pod \"collect-profiles-29422500-hwqzz\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.475428 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:00 crc kubenswrapper[4765]: I1210 07:00:00.648214 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"]
Dec 10 07:00:00 crc kubenswrapper[4765]: W1210 07:00:00.651274 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2666b91c_c78a_4c38_92d7_a4322fc55bd8.slice/crio-6f91ad171355033a671c8f13efd93d66ae2554159abf5297fa2559e6fcda33bd WatchSource:0}: Error finding container 6f91ad171355033a671c8f13efd93d66ae2554159abf5297fa2559e6fcda33bd: Status 404 returned error can't find the container with id 6f91ad171355033a671c8f13efd93d66ae2554159abf5297fa2559e6fcda33bd
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.505168 4765 generic.go:334] "Generic (PLEG): container finished" podID="2666b91c-c78a-4c38-92d7-a4322fc55bd8" containerID="f6bc5c702b03233a5ae15a29205a4114bba8a5bace8f951f5465fc6ae975e03d" exitCode=0
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.505231 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz" event={"ID":"2666b91c-c78a-4c38-92d7-a4322fc55bd8","Type":"ContainerDied","Data":"f6bc5c702b03233a5ae15a29205a4114bba8a5bace8f951f5465fc6ae975e03d"}
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.506400 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz" event={"ID":"2666b91c-c78a-4c38-92d7-a4322fc55bd8","Type":"ContainerStarted","Data":"6f91ad171355033a671c8f13efd93d66ae2554159abf5297fa2559e6fcda33bd"}
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.859113 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq"]
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.860536 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq"
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.863068 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-8jjkz"
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.863708 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"]
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.864529 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.865705 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.873850 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq"]
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.886207 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"]
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.891744 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-t5v5s"]
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.892453 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.983690 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"]
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.984376 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.986518 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.986731 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-bmw6z"
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.988677 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Dec 10 07:00:01 crc kubenswrapper[4765]: I1210 07:00:01.995528 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"]
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.054868 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9ed9df09-dada-4084-8201-d969872b21d7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-4vsz5\" (UID: \"9ed9df09-dada-4084-8201-d969872b21d7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.054960 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8797b\" (UniqueName: \"kubernetes.io/projected/82aebff8-bbbf-404f-a932-e336e4c46383-kube-api-access-8797b\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.055183 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/82aebff8-bbbf-404f-a932-e336e4c46383-ovs-socket\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.055389 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swg72\" (UniqueName: \"kubernetes.io/projected/9ed9df09-dada-4084-8201-d969872b21d7-kube-api-access-swg72\") pod \"nmstate-console-plugin-7fbb5f6569-4vsz5\" (UID: \"9ed9df09-dada-4084-8201-d969872b21d7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.055439 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbdqt\" (UniqueName: \"kubernetes.io/projected/b741a1af-d805-40d7-ad55-af348e32753d-kube-api-access-rbdqt\") pod \"nmstate-metrics-7f946cbc9-cwjdq\" (UID: \"b741a1af-d805-40d7-ad55-af348e32753d\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.055489 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/82aebff8-bbbf-404f-a932-e336e4c46383-dbus-socket\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.055514 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d4218171-d405-4118-9535-737b242c9453-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-k2zln\" (UID: \"d4218171-d405-4118-9535-737b242c9453\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.055553 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td99f\" (UniqueName: \"kubernetes.io/projected/d4218171-d405-4118-9535-737b242c9453-kube-api-access-td99f\") pod \"nmstate-webhook-5f6d4c5ccb-k2zln\" (UID: \"d4218171-d405-4118-9535-737b242c9453\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.055586 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/82aebff8-bbbf-404f-a932-e336e4c46383-nmstate-lock\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.055621 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9ed9df09-dada-4084-8201-d969872b21d7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-4vsz5\" (UID: \"9ed9df09-dada-4084-8201-d969872b21d7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.158379 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/82aebff8-bbbf-404f-a932-e336e4c46383-dbus-socket\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.158785 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d4218171-d405-4118-9535-737b242c9453-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-k2zln\" (UID: \"d4218171-d405-4118-9535-737b242c9453\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.158839 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td99f\" (UniqueName: \"kubernetes.io/projected/d4218171-d405-4118-9535-737b242c9453-kube-api-access-td99f\") pod \"nmstate-webhook-5f6d4c5ccb-k2zln\" (UID: \"d4218171-d405-4118-9535-737b242c9453\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.158880 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/82aebff8-bbbf-404f-a932-e336e4c46383-nmstate-lock\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.158927 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9ed9df09-dada-4084-8201-d969872b21d7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-4vsz5\" (UID: \"9ed9df09-dada-4084-8201-d969872b21d7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.158963 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9ed9df09-dada-4084-8201-d969872b21d7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-4vsz5\" (UID: \"9ed9df09-dada-4084-8201-d969872b21d7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.159004 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8797b\" (UniqueName: \"kubernetes.io/projected/82aebff8-bbbf-404f-a932-e336e4c46383-kube-api-access-8797b\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.159049 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/82aebff8-bbbf-404f-a932-e336e4c46383-ovs-socket\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.159099 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swg72\" (UniqueName: \"kubernetes.io/projected/9ed9df09-dada-4084-8201-d969872b21d7-kube-api-access-swg72\") pod \"nmstate-console-plugin-7fbb5f6569-4vsz5\" (UID: \"9ed9df09-dada-4084-8201-d969872b21d7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.159148 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-785d49f475-9ck8f"]
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.159794 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/82aebff8-bbbf-404f-a932-e336e4c46383-dbus-socket\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.160878 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9ed9df09-dada-4084-8201-d969872b21d7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-4vsz5\" (UID: \"9ed9df09-dada-4084-8201-d969872b21d7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.161115 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/82aebff8-bbbf-404f-a932-e336e4c46383-nmstate-lock\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.161221 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/82aebff8-bbbf-404f-a932-e336e4c46383-ovs-socket\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.159166 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbdqt\" (UniqueName: \"kubernetes.io/projected/b741a1af-d805-40d7-ad55-af348e32753d-kube-api-access-rbdqt\") pod \"nmstate-metrics-7f946cbc9-cwjdq\" (UID: \"b741a1af-d805-40d7-ad55-af348e32753d\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.162636 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.168776 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9ed9df09-dada-4084-8201-d969872b21d7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-4vsz5\" (UID: \"9ed9df09-dada-4084-8201-d969872b21d7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.175762 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d4218171-d405-4118-9535-737b242c9453-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-k2zln\" (UID: \"d4218171-d405-4118-9535-737b242c9453\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.189558 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-785d49f475-9ck8f"]
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.190314 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbdqt\" (UniqueName: \"kubernetes.io/projected/b741a1af-d805-40d7-ad55-af348e32753d-kube-api-access-rbdqt\") pod \"nmstate-metrics-7f946cbc9-cwjdq\" (UID: \"b741a1af-d805-40d7-ad55-af348e32753d\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.190666 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.209761 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8797b\" (UniqueName: \"kubernetes.io/projected/82aebff8-bbbf-404f-a932-e336e4c46383-kube-api-access-8797b\") pod \"nmstate-handler-t5v5s\" (UID: \"82aebff8-bbbf-404f-a932-e336e4c46383\") " pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.211380 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.218423 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swg72\" (UniqueName: \"kubernetes.io/projected/9ed9df09-dada-4084-8201-d969872b21d7-kube-api-access-swg72\") pod \"nmstate-console-plugin-7fbb5f6569-4vsz5\" (UID: \"9ed9df09-dada-4084-8201-d969872b21d7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.219021 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td99f\" (UniqueName: \"kubernetes.io/projected/d4218171-d405-4118-9535-737b242c9453-kube-api-access-td99f\") pod \"nmstate-webhook-5f6d4c5ccb-k2zln\" (UID: \"d4218171-d405-4118-9535-737b242c9453\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.262457 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/20555abe-8f65-4f7e-8c3a-142b3005184b-console-serving-cert\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.262498 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/20555abe-8f65-4f7e-8c3a-142b3005184b-console-oauth-config\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.262525 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-oauth-serving-cert\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.262572 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-service-ca\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.262603 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-trusted-ca-bundle\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.262628 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-console-config\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.262963 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfrx6\" (UniqueName: \"kubernetes.io/projected/20555abe-8f65-4f7e-8c3a-142b3005184b-kube-api-access-tfrx6\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.298231 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.363509 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-service-ca\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.363825 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-trusted-ca-bundle\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.363844 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-console-config\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.363866 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfrx6\" (UniqueName: \"kubernetes.io/projected/20555abe-8f65-4f7e-8c3a-142b3005184b-kube-api-access-tfrx6\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.363892 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/20555abe-8f65-4f7e-8c3a-142b3005184b-console-serving-cert\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.363910 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/20555abe-8f65-4f7e-8c3a-142b3005184b-console-oauth-config\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.363928 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-oauth-serving-cert\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.364955 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-oauth-serving-cert\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.365077 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-console-config\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.365940 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-service-ca\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.367378 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20555abe-8f65-4f7e-8c3a-142b3005184b-trusted-ca-bundle\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.371244 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/20555abe-8f65-4f7e-8c3a-142b3005184b-console-serving-cert\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.371379 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/20555abe-8f65-4f7e-8c3a-142b3005184b-console-oauth-config\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.384664 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfrx6\" (UniqueName: \"kubernetes.io/projected/20555abe-8f65-4f7e-8c3a-142b3005184b-kube-api-access-tfrx6\") pod \"console-785d49f475-9ck8f\" (UID: \"20555abe-8f65-4f7e-8c3a-142b3005184b\") " pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.496331 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5"]
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.499161 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"
Dec 10 07:00:02 crc kubenswrapper[4765]: W1210 07:00:02.501212 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ed9df09_dada_4084_8201_d969872b21d7.slice/crio-7daf8da9a4ec9f9e9be26754dab75a5de812bdf76b56e9277d2b1548d5aa7ad9 WatchSource:0}: Error finding container 7daf8da9a4ec9f9e9be26754dab75a5de812bdf76b56e9277d2b1548d5aa7ad9: Status 404 returned error can't find the container with id 7daf8da9a4ec9f9e9be26754dab75a5de812bdf76b56e9277d2b1548d5aa7ad9
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.516646 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5" event={"ID":"9ed9df09-dada-4084-8201-d969872b21d7","Type":"ContainerStarted","Data":"7daf8da9a4ec9f9e9be26754dab75a5de812bdf76b56e9277d2b1548d5aa7ad9"}
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.519329 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-t5v5s" event={"ID":"82aebff8-bbbf-404f-a932-e336e4c46383","Type":"ContainerStarted","Data":"1ab482542e431a8e3aa1b649f4c40782cbee618bb74ce5e785cc35aaa14bae11"}
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.568515 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.647597 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq"]
Dec 10 07:00:02 crc kubenswrapper[4765]: W1210 07:00:02.657885 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb741a1af_d805_40d7_ad55_af348e32753d.slice/crio-33cf4f8833a94477c47cf5212ff996f61b188d4e326523c4df4b4918f0dd57f8 WatchSource:0}: Error finding container 33cf4f8833a94477c47cf5212ff996f61b188d4e326523c4df4b4918f0dd57f8: Status 404 returned error can't find the container with id 33cf4f8833a94477c47cf5212ff996f61b188d4e326523c4df4b4918f0dd57f8
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.737554 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"]
Dec 10 07:00:02 crc kubenswrapper[4765]: W1210 07:00:02.744868 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4218171_d405_4118_9535_737b242c9453.slice/crio-8d8b8cecbc361145d87de1671d51a484b8c19567a6c14b33555ea067c89bbcc6 WatchSource:0}: Error finding container 8d8b8cecbc361145d87de1671d51a484b8c19567a6c14b33555ea067c89bbcc6: Status 404 returned error can't find the container with id 8d8b8cecbc361145d87de1671d51a484b8c19567a6c14b33555ea067c89bbcc6
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.750910 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.870002 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2666b91c-c78a-4c38-92d7-a4322fc55bd8-secret-volume\") pod \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") "
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.870161 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nls5\" (UniqueName: \"kubernetes.io/projected/2666b91c-c78a-4c38-92d7-a4322fc55bd8-kube-api-access-5nls5\") pod \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") "
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.870187 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2666b91c-c78a-4c38-92d7-a4322fc55bd8-config-volume\") pod \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\" (UID: \"2666b91c-c78a-4c38-92d7-a4322fc55bd8\") "
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.871365 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2666b91c-c78a-4c38-92d7-a4322fc55bd8-config-volume" (OuterVolumeSpecName: "config-volume") pod "2666b91c-c78a-4c38-92d7-a4322fc55bd8" (UID: "2666b91c-c78a-4c38-92d7-a4322fc55bd8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.871760 4765 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2666b91c-c78a-4c38-92d7-a4322fc55bd8-config-volume\") on node \"crc\" DevicePath \"\""
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.874079 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2666b91c-c78a-4c38-92d7-a4322fc55bd8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2666b91c-c78a-4c38-92d7-a4322fc55bd8" (UID: "2666b91c-c78a-4c38-92d7-a4322fc55bd8"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.874145 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2666b91c-c78a-4c38-92d7-a4322fc55bd8-kube-api-access-5nls5" (OuterVolumeSpecName: "kube-api-access-5nls5") pod "2666b91c-c78a-4c38-92d7-a4322fc55bd8" (UID: "2666b91c-c78a-4c38-92d7-a4322fc55bd8"). InnerVolumeSpecName "kube-api-access-5nls5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.972552 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nls5\" (UniqueName: \"kubernetes.io/projected/2666b91c-c78a-4c38-92d7-a4322fc55bd8-kube-api-access-5nls5\") on node \"crc\" DevicePath \"\""
Dec 10 07:00:02 crc kubenswrapper[4765]: I1210 07:00:02.972586 4765 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2666b91c-c78a-4c38-92d7-a4322fc55bd8-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 10 07:00:03 crc kubenswrapper[4765]: I1210 07:00:03.007240 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-785d49f475-9ck8f"]
Dec 10 07:00:03 crc kubenswrapper[4765]: W1210 07:00:03.010527 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20555abe_8f65_4f7e_8c3a_142b3005184b.slice/crio-e9d81d10f1cd7e3e6a874a2319821e871694d58dd92527764f8a9112280084fc WatchSource:0}: Error finding container e9d81d10f1cd7e3e6a874a2319821e871694d58dd92527764f8a9112280084fc: Status 404 returned error can't find the container with id e9d81d10f1cd7e3e6a874a2319821e871694d58dd92527764f8a9112280084fc
Dec 10 07:00:03 crc kubenswrapper[4765]: I1210 07:00:03.525448 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln" event={"ID":"d4218171-d405-4118-9535-737b242c9453","Type":"ContainerStarted","Data":"8d8b8cecbc361145d87de1671d51a484b8c19567a6c14b33555ea067c89bbcc6"}
Dec 10 07:00:03 crc kubenswrapper[4765]: I1210 07:00:03.526884 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq" event={"ID":"b741a1af-d805-40d7-ad55-af348e32753d","Type":"ContainerStarted","Data":"33cf4f8833a94477c47cf5212ff996f61b188d4e326523c4df4b4918f0dd57f8"}
Dec 10 07:00:03 crc kubenswrapper[4765]: I1210 07:00:03.529043 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz" event={"ID":"2666b91c-c78a-4c38-92d7-a4322fc55bd8","Type":"ContainerDied","Data":"6f91ad171355033a671c8f13efd93d66ae2554159abf5297fa2559e6fcda33bd"}
Dec 10 07:00:03 crc kubenswrapper[4765]: I1210 07:00:03.529079 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f91ad171355033a671c8f13efd93d66ae2554159abf5297fa2559e6fcda33bd"
Dec 10 07:00:03 crc kubenswrapper[4765]: I1210 07:00:03.529105 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"
Dec 10 07:00:03 crc kubenswrapper[4765]: I1210 07:00:03.530415 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-785d49f475-9ck8f" event={"ID":"20555abe-8f65-4f7e-8c3a-142b3005184b","Type":"ContainerStarted","Data":"f658c1eb9154381f85cdfbb553ce14c2b9d6438d63db81aa0c073e7811da142a"}
Dec 10 07:00:03 crc kubenswrapper[4765]: I1210 07:00:03.530462 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-785d49f475-9ck8f" event={"ID":"20555abe-8f65-4f7e-8c3a-142b3005184b","Type":"ContainerStarted","Data":"e9d81d10f1cd7e3e6a874a2319821e871694d58dd92527764f8a9112280084fc"}
Dec 10 07:00:03 crc kubenswrapper[4765]: I1210 07:00:03.548194 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-785d49f475-9ck8f" podStartSLOduration=1.548177733 podStartE2EDuration="1.548177733s" podCreationTimestamp="2025-12-10 07:00:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:00:03.547840513 +0000 UTC m=+723.274505839" watchObservedRunningTime="2025-12-10 07:00:03.548177733 +0000 UTC m=+723.274843049"
Dec 10 07:00:04 crc kubenswrapper[4765]: I1210 07:00:04.050887 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 07:00:04 crc kubenswrapper[4765]: I1210 07:00:04.050988 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 07:00:07 crc kubenswrapper[4765]: I1210 07:00:07.749494 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5" event={"ID":"9ed9df09-dada-4084-8201-d969872b21d7","Type":"ContainerStarted","Data":"2fef92f27b79c3bd666ffe09a1250f3a938b9801ded8650a0d096be1a23619b7"}
Dec 10 07:00:07 crc kubenswrapper[4765]: I1210 07:00:07.751228 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln" event={"ID":"d4218171-d405-4118-9535-737b242c9453","Type":"ContainerStarted","Data":"8ec63fb8c65bdc0b6ef4934564c904005a00dbf2fe13cb1db370902e179e7d89"}
Dec 10 07:00:07 crc kubenswrapper[4765]: I1210 07:00:07.751417 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"
Dec 10 07:00:07 crc kubenswrapper[4765]: I1210 07:00:07.753551 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq" event={"ID":"b741a1af-d805-40d7-ad55-af348e32753d","Type":"ContainerStarted","Data":"3a5869e03019a5be870b52980d6582ec3428c8c06d0ea0ae7215976da59bae3a"}
Dec 10 07:00:07 crc kubenswrapper[4765]: I1210 07:00:07.755271 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-t5v5s" event={"ID":"82aebff8-bbbf-404f-a932-e336e4c46383","Type":"ContainerStarted","Data":"1527bd9d09862b0ee1662c9ab35df75b11d4a52fef0f331e7f5c2955534e7a20"}
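The machine-config-daemon liveness failures above are plain HTTP GETs against http://127.0.0.1:8798/health answered with "connection refused". A minimal sketch of such a probe; the one-second timeout is an assumption, and the endpoint is reused from the log:

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get("http://127.0.0.1:8798/health")
	if err != nil {
		// e.g. "dial tcp 127.0.0.1:8798: connect: connection refused",
		// which the kubelet records as a probe failure.
		fmt.Println("Probe failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("Probe status:", resp.Status)
}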
Dec 10 07:00:07 crc kubenswrapper[4765]: I1210 07:00:07.755396 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:07 crc kubenswrapper[4765]: I1210 07:00:07.766395 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-4vsz5" podStartSLOduration=2.006619294 podStartE2EDuration="6.766375472s" podCreationTimestamp="2025-12-10 07:00:01 +0000 UTC" firstStartedPulling="2025-12-10 07:00:02.506760801 +0000 UTC m=+722.233426117" lastFinishedPulling="2025-12-10 07:00:07.266516979 +0000 UTC m=+726.993182295" observedRunningTime="2025-12-10 07:00:07.76526396 +0000 UTC m=+727.491929276" watchObservedRunningTime="2025-12-10 07:00:07.766375472 +0000 UTC m=+727.493040788"
Dec 10 07:00:07 crc kubenswrapper[4765]: I1210 07:00:07.786072 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln" podStartSLOduration=2.265673715 podStartE2EDuration="6.786052061s" podCreationTimestamp="2025-12-10 07:00:01 +0000 UTC" firstStartedPulling="2025-12-10 07:00:02.746599176 +0000 UTC m=+722.473264492" lastFinishedPulling="2025-12-10 07:00:07.266977522 +0000 UTC m=+726.993642838" observedRunningTime="2025-12-10 07:00:07.783590211 +0000 UTC m=+727.510255537" watchObservedRunningTime="2025-12-10 07:00:07.786052061 +0000 UTC m=+727.512717377"
Dec 10 07:00:07 crc kubenswrapper[4765]: I1210 07:00:07.808229 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-t5v5s" podStartSLOduration=1.812721073 podStartE2EDuration="6.80820913s" podCreationTimestamp="2025-12-10 07:00:01 +0000 UTC" firstStartedPulling="2025-12-10 07:00:02.272530015 +0000 UTC m=+721.999195331" lastFinishedPulling="2025-12-10 07:00:07.268018072 +0000 UTC m=+726.994683388" observedRunningTime="2025-12-10 07:00:07.80467169 +0000 UTC m=+727.531337026" watchObservedRunningTime="2025-12-10 07:00:07.80820913 +0000 UTC m=+727.534874446"
Dec 10 07:00:11 crc kubenswrapper[4765]: I1210 07:00:11.776995 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq" event={"ID":"b741a1af-d805-40d7-ad55-af348e32753d","Type":"ContainerStarted","Data":"e12b2327bb137cb588f61dfb661800b81fa73bc943505ce6d515200f730a9158"}
Dec 10 07:00:11 crc kubenswrapper[4765]: I1210 07:00:11.791321 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-cwjdq" podStartSLOduration=2.50447595 podStartE2EDuration="10.791300379s" podCreationTimestamp="2025-12-10 07:00:01 +0000 UTC" firstStartedPulling="2025-12-10 07:00:02.660031876 +0000 UTC m=+722.386697202" lastFinishedPulling="2025-12-10 07:00:10.946856315 +0000 UTC m=+730.673521631" observedRunningTime="2025-12-10 07:00:11.789600261 +0000 UTC m=+731.516265577" watchObservedRunningTime="2025-12-10 07:00:11.791300379 +0000 UTC m=+731.517965705"
Dec 10 07:00:12 crc kubenswrapper[4765]: I1210 07:00:12.231717 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-t5v5s"
Dec 10 07:00:12 crc kubenswrapper[4765]: I1210 07:00:12.569827 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:12 crc kubenswrapper[4765]: I1210 07:00:12.569882 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:12 crc kubenswrapper[4765]: I1210 07:00:12.573920 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:12 crc kubenswrapper[4765]: I1210 07:00:12.788936 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-785d49f475-9ck8f"
Dec 10 07:00:12 crc kubenswrapper[4765]: I1210 07:00:12.845677 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-fv6zj"]
Dec 10 07:00:22 crc kubenswrapper[4765]: I1210 07:00:22.506310 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2zln"
Dec 10 07:00:32 crc kubenswrapper[4765]: I1210 07:00:32.042808 4765 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 10 07:00:34 crc kubenswrapper[4765]: I1210 07:00:34.049865 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 07:00:34 crc kubenswrapper[4765]: I1210 07:00:34.050669 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.294426 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5"]
Dec 10 07:00:35 crc kubenswrapper[4765]: E1210 07:00:35.294645 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2666b91c-c78a-4c38-92d7-a4322fc55bd8" containerName="collect-profiles"
Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.294656 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2666b91c-c78a-4c38-92d7-a4322fc55bd8" containerName="collect-profiles"
Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.294757 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="2666b91c-c78a-4c38-92d7-a4322fc55bd8" containerName="collect-profiles"
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.297534 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.340802 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5"] Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.472942 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.473281 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfdwg\" (UniqueName: \"kubernetes.io/projected/8a6ab63c-576c-47a1-ad2a-346353164954-kube-api-access-kfdwg\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.473351 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.574159 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.574513 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfdwg\" (UniqueName: \"kubernetes.io/projected/8a6ab63c-576c-47a1-ad2a-346353164954-kube-api-access-kfdwg\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.574661 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.575114 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.575354 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.601518 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfdwg\" (UniqueName: \"kubernetes.io/projected/8a6ab63c-576c-47a1-ad2a-346353164954-kube-api-access-kfdwg\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:35 crc kubenswrapper[4765]: I1210 07:00:35.611385 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:36 crc kubenswrapper[4765]: I1210 07:00:36.133939 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5"] Dec 10 07:00:36 crc kubenswrapper[4765]: I1210 07:00:36.923711 4765 generic.go:334] "Generic (PLEG): container finished" podID="8a6ab63c-576c-47a1-ad2a-346353164954" containerID="d32c2859c6e0174056585e3571dbda1e155c343f407dcfd82ffb887639146803" exitCode=0 Dec 10 07:00:36 crc kubenswrapper[4765]: I1210 07:00:36.923942 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" event={"ID":"8a6ab63c-576c-47a1-ad2a-346353164954","Type":"ContainerDied","Data":"d32c2859c6e0174056585e3571dbda1e155c343f407dcfd82ffb887639146803"} Dec 10 07:00:36 crc kubenswrapper[4765]: I1210 07:00:36.924012 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" event={"ID":"8a6ab63c-576c-47a1-ad2a-346353164954","Type":"ContainerStarted","Data":"7d6e88e9adfc02af05e93166b9de9e84c525bcbd066a5fe12e21eea165f68280"} Dec 10 07:00:37 crc kubenswrapper[4765]: I1210 07:00:37.889035 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-fv6zj" podUID="2d2049d7-de64-4070-959f-8cefd1f15e5d" containerName="console" containerID="cri-o://2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20" gracePeriod=15 Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.285964 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-fv6zj_2d2049d7-de64-4070-959f-8cefd1f15e5d/console/0.log" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.286067 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.417695 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-trusted-ca-bundle\") pod \"2d2049d7-de64-4070-959f-8cefd1f15e5d\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.417853 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-config\") pod \"2d2049d7-de64-4070-959f-8cefd1f15e5d\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.417894 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-serving-cert\") pod \"2d2049d7-de64-4070-959f-8cefd1f15e5d\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.417937 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-service-ca\") pod \"2d2049d7-de64-4070-959f-8cefd1f15e5d\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.417958 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-685ww\" (UniqueName: \"kubernetes.io/projected/2d2049d7-de64-4070-959f-8cefd1f15e5d-kube-api-access-685ww\") pod \"2d2049d7-de64-4070-959f-8cefd1f15e5d\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.418000 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-oauth-serving-cert\") pod \"2d2049d7-de64-4070-959f-8cefd1f15e5d\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.418030 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-oauth-config\") pod \"2d2049d7-de64-4070-959f-8cefd1f15e5d\" (UID: \"2d2049d7-de64-4070-959f-8cefd1f15e5d\") " Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.418697 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "2d2049d7-de64-4070-959f-8cefd1f15e5d" (UID: "2d2049d7-de64-4070-959f-8cefd1f15e5d"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.418718 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-config" (OuterVolumeSpecName: "console-config") pod "2d2049d7-de64-4070-959f-8cefd1f15e5d" (UID: "2d2049d7-de64-4070-959f-8cefd1f15e5d"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.418711 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "2d2049d7-de64-4070-959f-8cefd1f15e5d" (UID: "2d2049d7-de64-4070-959f-8cefd1f15e5d"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.418870 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-service-ca" (OuterVolumeSpecName: "service-ca") pod "2d2049d7-de64-4070-959f-8cefd1f15e5d" (UID: "2d2049d7-de64-4070-959f-8cefd1f15e5d"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.424454 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "2d2049d7-de64-4070-959f-8cefd1f15e5d" (UID: "2d2049d7-de64-4070-959f-8cefd1f15e5d"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.424687 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d2049d7-de64-4070-959f-8cefd1f15e5d-kube-api-access-685ww" (OuterVolumeSpecName: "kube-api-access-685ww") pod "2d2049d7-de64-4070-959f-8cefd1f15e5d" (UID: "2d2049d7-de64-4070-959f-8cefd1f15e5d"). InnerVolumeSpecName "kube-api-access-685ww". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.424688 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "2d2049d7-de64-4070-959f-8cefd1f15e5d" (UID: "2d2049d7-de64-4070-959f-8cefd1f15e5d"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.519397 4765 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.519428 4765 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.519440 4765 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.519449 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-685ww\" (UniqueName: \"kubernetes.io/projected/2d2049d7-de64-4070-959f-8cefd1f15e5d-kube-api-access-685ww\") on node \"crc\" DevicePath \"\"" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.519460 4765 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.519468 4765 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d2049d7-de64-4070-959f-8cefd1f15e5d-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.519476 4765 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d2049d7-de64-4070-959f-8cefd1f15e5d-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.659078 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mks7d"] Dec 10 07:00:38 crc kubenswrapper[4765]: E1210 07:00:38.659678 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d2049d7-de64-4070-959f-8cefd1f15e5d" containerName="console" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.659697 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d2049d7-de64-4070-959f-8cefd1f15e5d" containerName="console" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.660159 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d2049d7-de64-4070-959f-8cefd1f15e5d" containerName="console" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.661424 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.670989 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mks7d"] Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.823805 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-catalog-content\") pod \"redhat-operators-mks7d\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.823886 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-utilities\") pod \"redhat-operators-mks7d\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.824026 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmjcm\" (UniqueName: \"kubernetes.io/projected/e701c133-bdb9-4d2c-975c-c08ac7f8205b-kube-api-access-qmjcm\") pod \"redhat-operators-mks7d\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.925473 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-catalog-content\") pod \"redhat-operators-mks7d\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.925543 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-utilities\") pod \"redhat-operators-mks7d\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.925587 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmjcm\" (UniqueName: \"kubernetes.io/projected/e701c133-bdb9-4d2c-975c-c08ac7f8205b-kube-api-access-qmjcm\") pod \"redhat-operators-mks7d\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.926235 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-catalog-content\") pod \"redhat-operators-mks7d\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.926242 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-utilities\") pod \"redhat-operators-mks7d\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.936664 4765 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-console_console-f9d7485db-fv6zj_2d2049d7-de64-4070-959f-8cefd1f15e5d/console/0.log" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.936717 4765 generic.go:334] "Generic (PLEG): container finished" podID="2d2049d7-de64-4070-959f-8cefd1f15e5d" containerID="2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20" exitCode=2 Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.936749 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fv6zj" event={"ID":"2d2049d7-de64-4070-959f-8cefd1f15e5d","Type":"ContainerDied","Data":"2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20"} Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.936782 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fv6zj" event={"ID":"2d2049d7-de64-4070-959f-8cefd1f15e5d","Type":"ContainerDied","Data":"92d830dfff9eb1d0c8ef55ced8000b4476e1e85f3c778db530f31e18a7146e25"} Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.936790 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-fv6zj" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.936809 4765 scope.go:117] "RemoveContainer" containerID="2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.944827 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmjcm\" (UniqueName: \"kubernetes.io/projected/e701c133-bdb9-4d2c-975c-c08ac7f8205b-kube-api-access-qmjcm\") pod \"redhat-operators-mks7d\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.954012 4765 scope.go:117] "RemoveContainer" containerID="2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20" Dec 10 07:00:38 crc kubenswrapper[4765]: E1210 07:00:38.954587 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20\": container with ID starting with 2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20 not found: ID does not exist" containerID="2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.954645 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20"} err="failed to get container status \"2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20\": rpc error: code = NotFound desc = could not find container \"2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20\": container with ID starting with 2fe86f040b2008350cad1cc23ecb2c74e4bbae2647a55a37b06dedfb99f2ed20 not found: ID does not exist" Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.958011 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-fv6zj"] Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.962365 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-fv6zj"] Dec 10 07:00:38 crc kubenswrapper[4765]: I1210 07:00:38.989872 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:39 crc kubenswrapper[4765]: I1210 07:00:39.225400 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mks7d"] Dec 10 07:00:39 crc kubenswrapper[4765]: I1210 07:00:39.963826 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mks7d" event={"ID":"e701c133-bdb9-4d2c-975c-c08ac7f8205b","Type":"ContainerStarted","Data":"3e17a4e6efb223c7870bd2028351886b6c17fd238753d3cb061c019add11b600"} Dec 10 07:00:40 crc kubenswrapper[4765]: I1210 07:00:40.595546 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d2049d7-de64-4070-959f-8cefd1f15e5d" path="/var/lib/kubelet/pods/2d2049d7-de64-4070-959f-8cefd1f15e5d/volumes" Dec 10 07:00:40 crc kubenswrapper[4765]: I1210 07:00:40.971729 4765 generic.go:334] "Generic (PLEG): container finished" podID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerID="931b20f18e808d8fafc840de934708f2d5d320059735ffbcdfd55c033d93e97f" exitCode=0 Dec 10 07:00:40 crc kubenswrapper[4765]: I1210 07:00:40.971779 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mks7d" event={"ID":"e701c133-bdb9-4d2c-975c-c08ac7f8205b","Type":"ContainerDied","Data":"931b20f18e808d8fafc840de934708f2d5d320059735ffbcdfd55c033d93e97f"} Dec 10 07:00:42 crc kubenswrapper[4765]: I1210 07:00:42.986566 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mks7d" event={"ID":"e701c133-bdb9-4d2c-975c-c08ac7f8205b","Type":"ContainerStarted","Data":"5d8551c190c0369c5442079bc25b3c3247547920e5a8b757ff6296dbebba21a2"} Dec 10 07:00:44 crc kubenswrapper[4765]: I1210 07:00:44.276041 4765 generic.go:334] "Generic (PLEG): container finished" podID="8a6ab63c-576c-47a1-ad2a-346353164954" containerID="d566e91b00a7aa930bce98b7fd5865a47b8dfc30121fbbe91a2217bc6478cb13" exitCode=0 Dec 10 07:00:44 crc kubenswrapper[4765]: I1210 07:00:44.276109 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" event={"ID":"8a6ab63c-576c-47a1-ad2a-346353164954","Type":"ContainerDied","Data":"d566e91b00a7aa930bce98b7fd5865a47b8dfc30121fbbe91a2217bc6478cb13"} Dec 10 07:00:45 crc kubenswrapper[4765]: I1210 07:00:45.290660 4765 generic.go:334] "Generic (PLEG): container finished" podID="8a6ab63c-576c-47a1-ad2a-346353164954" containerID="16a99b1160d573b1ade450adcf99fc5395b78576be44e58f5b66838e6297798a" exitCode=0 Dec 10 07:00:45 crc kubenswrapper[4765]: I1210 07:00:45.290744 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" event={"ID":"8a6ab63c-576c-47a1-ad2a-346353164954","Type":"ContainerDied","Data":"16a99b1160d573b1ade450adcf99fc5395b78576be44e58f5b66838e6297798a"} Dec 10 07:00:46 crc kubenswrapper[4765]: I1210 07:00:46.555449 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:46 crc kubenswrapper[4765]: I1210 07:00:46.740439 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-bundle\") pod \"8a6ab63c-576c-47a1-ad2a-346353164954\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " Dec 10 07:00:46 crc kubenswrapper[4765]: I1210 07:00:46.740540 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-util\") pod \"8a6ab63c-576c-47a1-ad2a-346353164954\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " Dec 10 07:00:46 crc kubenswrapper[4765]: I1210 07:00:46.740701 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfdwg\" (UniqueName: \"kubernetes.io/projected/8a6ab63c-576c-47a1-ad2a-346353164954-kube-api-access-kfdwg\") pod \"8a6ab63c-576c-47a1-ad2a-346353164954\" (UID: \"8a6ab63c-576c-47a1-ad2a-346353164954\") " Dec 10 07:00:46 crc kubenswrapper[4765]: I1210 07:00:46.741670 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-bundle" (OuterVolumeSpecName: "bundle") pod "8a6ab63c-576c-47a1-ad2a-346353164954" (UID: "8a6ab63c-576c-47a1-ad2a-346353164954"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:00:46 crc kubenswrapper[4765]: I1210 07:00:46.741958 4765 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:00:46 crc kubenswrapper[4765]: I1210 07:00:46.747307 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a6ab63c-576c-47a1-ad2a-346353164954-kube-api-access-kfdwg" (OuterVolumeSpecName: "kube-api-access-kfdwg") pod "8a6ab63c-576c-47a1-ad2a-346353164954" (UID: "8a6ab63c-576c-47a1-ad2a-346353164954"). InnerVolumeSpecName "kube-api-access-kfdwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:00:46 crc kubenswrapper[4765]: I1210 07:00:46.757667 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-util" (OuterVolumeSpecName: "util") pod "8a6ab63c-576c-47a1-ad2a-346353164954" (UID: "8a6ab63c-576c-47a1-ad2a-346353164954"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:00:46 crc kubenswrapper[4765]: I1210 07:00:46.843322 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfdwg\" (UniqueName: \"kubernetes.io/projected/8a6ab63c-576c-47a1-ad2a-346353164954-kube-api-access-kfdwg\") on node \"crc\" DevicePath \"\"" Dec 10 07:00:46 crc kubenswrapper[4765]: I1210 07:00:46.843378 4765 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a6ab63c-576c-47a1-ad2a-346353164954-util\") on node \"crc\" DevicePath \"\"" Dec 10 07:00:47 crc kubenswrapper[4765]: I1210 07:00:47.302789 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" Dec 10 07:00:47 crc kubenswrapper[4765]: I1210 07:00:47.302761 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5" event={"ID":"8a6ab63c-576c-47a1-ad2a-346353164954","Type":"ContainerDied","Data":"7d6e88e9adfc02af05e93166b9de9e84c525bcbd066a5fe12e21eea165f68280"} Dec 10 07:00:47 crc kubenswrapper[4765]: I1210 07:00:47.302977 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d6e88e9adfc02af05e93166b9de9e84c525bcbd066a5fe12e21eea165f68280" Dec 10 07:00:47 crc kubenswrapper[4765]: I1210 07:00:47.304987 4765 generic.go:334] "Generic (PLEG): container finished" podID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerID="5d8551c190c0369c5442079bc25b3c3247547920e5a8b757ff6296dbebba21a2" exitCode=0 Dec 10 07:00:47 crc kubenswrapper[4765]: I1210 07:00:47.305030 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mks7d" event={"ID":"e701c133-bdb9-4d2c-975c-c08ac7f8205b","Type":"ContainerDied","Data":"5d8551c190c0369c5442079bc25b3c3247547920e5a8b757ff6296dbebba21a2"} Dec 10 07:00:49 crc kubenswrapper[4765]: I1210 07:00:49.317480 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mks7d" event={"ID":"e701c133-bdb9-4d2c-975c-c08ac7f8205b","Type":"ContainerStarted","Data":"dc0c10de3ab52d39a593be310ed7e3119bce5b3569d434c6e57f26dc335b356b"} Dec 10 07:00:49 crc kubenswrapper[4765]: I1210 07:00:49.345563 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mks7d" podStartSLOduration=3.980754971 podStartE2EDuration="11.345544092s" podCreationTimestamp="2025-12-10 07:00:38 +0000 UTC" firstStartedPulling="2025-12-10 07:00:40.973817259 +0000 UTC m=+760.700482615" lastFinishedPulling="2025-12-10 07:00:48.33860642 +0000 UTC m=+768.065271736" observedRunningTime="2025-12-10 07:00:49.341728274 +0000 UTC m=+769.068393590" watchObservedRunningTime="2025-12-10 07:00:49.345544092 +0000 UTC m=+769.072209408" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.574640 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b"] Dec 10 07:00:55 crc kubenswrapper[4765]: E1210 07:00:55.575435 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a6ab63c-576c-47a1-ad2a-346353164954" containerName="util" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.575451 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a6ab63c-576c-47a1-ad2a-346353164954" containerName="util" Dec 10 07:00:55 crc kubenswrapper[4765]: E1210 07:00:55.575475 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a6ab63c-576c-47a1-ad2a-346353164954" containerName="extract" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.575481 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a6ab63c-576c-47a1-ad2a-346353164954" containerName="extract" Dec 10 07:00:55 crc kubenswrapper[4765]: E1210 07:00:55.575514 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a6ab63c-576c-47a1-ad2a-346353164954" containerName="pull" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.575522 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a6ab63c-576c-47a1-ad2a-346353164954" containerName="pull" Dec 10 07:00:55 
crc kubenswrapper[4765]: I1210 07:00:55.575636 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a6ab63c-576c-47a1-ad2a-346353164954" containerName="extract" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.576154 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.581819 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.581877 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-w7svv" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.581966 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.584350 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.586066 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.594291 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b"] Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.767998 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/165a5bae-361a-45b3-8a95-9b121b537acb-webhook-cert\") pod \"metallb-operator-controller-manager-b9b79bb5f-zds8b\" (UID: \"165a5bae-361a-45b3-8a95-9b121b537acb\") " pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.768043 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/165a5bae-361a-45b3-8a95-9b121b537acb-apiservice-cert\") pod \"metallb-operator-controller-manager-b9b79bb5f-zds8b\" (UID: \"165a5bae-361a-45b3-8a95-9b121b537acb\") " pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.768092 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6nmw\" (UniqueName: \"kubernetes.io/projected/165a5bae-361a-45b3-8a95-9b121b537acb-kube-api-access-h6nmw\") pod \"metallb-operator-controller-manager-b9b79bb5f-zds8b\" (UID: \"165a5bae-361a-45b3-8a95-9b121b537acb\") " pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.868979 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/165a5bae-361a-45b3-8a95-9b121b537acb-webhook-cert\") pod \"metallb-operator-controller-manager-b9b79bb5f-zds8b\" (UID: \"165a5bae-361a-45b3-8a95-9b121b537acb\") " pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.869038 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/165a5bae-361a-45b3-8a95-9b121b537acb-apiservice-cert\") pod \"metallb-operator-controller-manager-b9b79bb5f-zds8b\" (UID: \"165a5bae-361a-45b3-8a95-9b121b537acb\") " pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.869070 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6nmw\" (UniqueName: \"kubernetes.io/projected/165a5bae-361a-45b3-8a95-9b121b537acb-kube-api-access-h6nmw\") pod \"metallb-operator-controller-manager-b9b79bb5f-zds8b\" (UID: \"165a5bae-361a-45b3-8a95-9b121b537acb\") " pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.882186 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/165a5bae-361a-45b3-8a95-9b121b537acb-webhook-cert\") pod \"metallb-operator-controller-manager-b9b79bb5f-zds8b\" (UID: \"165a5bae-361a-45b3-8a95-9b121b537acb\") " pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.885105 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/165a5bae-361a-45b3-8a95-9b121b537acb-apiservice-cert\") pod \"metallb-operator-controller-manager-b9b79bb5f-zds8b\" (UID: \"165a5bae-361a-45b3-8a95-9b121b537acb\") " pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.888171 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6nmw\" (UniqueName: \"kubernetes.io/projected/165a5bae-361a-45b3-8a95-9b121b537acb-kube-api-access-h6nmw\") pod \"metallb-operator-controller-manager-b9b79bb5f-zds8b\" (UID: \"165a5bae-361a-45b3-8a95-9b121b537acb\") " pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:55 crc kubenswrapper[4765]: I1210 07:00:55.897531 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.603466 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw"] Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.604432 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.610546 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.610780 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.611140 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-mdq7p" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.630426 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw"] Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.638874 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e7043cbc-6521-4a96-8324-cf79f32ca135-webhook-cert\") pod \"metallb-operator-webhook-server-5576b55846-vxnvw\" (UID: \"e7043cbc-6521-4a96-8324-cf79f32ca135\") " pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.638950 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e7043cbc-6521-4a96-8324-cf79f32ca135-apiservice-cert\") pod \"metallb-operator-webhook-server-5576b55846-vxnvw\" (UID: \"e7043cbc-6521-4a96-8324-cf79f32ca135\") " pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.638983 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvgh6\" (UniqueName: \"kubernetes.io/projected/e7043cbc-6521-4a96-8324-cf79f32ca135-kube-api-access-zvgh6\") pod \"metallb-operator-webhook-server-5576b55846-vxnvw\" (UID: \"e7043cbc-6521-4a96-8324-cf79f32ca135\") " pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.741059 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e7043cbc-6521-4a96-8324-cf79f32ca135-webhook-cert\") pod \"metallb-operator-webhook-server-5576b55846-vxnvw\" (UID: \"e7043cbc-6521-4a96-8324-cf79f32ca135\") " pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.741139 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e7043cbc-6521-4a96-8324-cf79f32ca135-apiservice-cert\") pod \"metallb-operator-webhook-server-5576b55846-vxnvw\" (UID: \"e7043cbc-6521-4a96-8324-cf79f32ca135\") " pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.741173 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvgh6\" (UniqueName: \"kubernetes.io/projected/e7043cbc-6521-4a96-8324-cf79f32ca135-kube-api-access-zvgh6\") pod \"metallb-operator-webhook-server-5576b55846-vxnvw\" (UID: \"e7043cbc-6521-4a96-8324-cf79f32ca135\") " pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 
07:00:56.745995 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e7043cbc-6521-4a96-8324-cf79f32ca135-webhook-cert\") pod \"metallb-operator-webhook-server-5576b55846-vxnvw\" (UID: \"e7043cbc-6521-4a96-8324-cf79f32ca135\") " pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.746812 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e7043cbc-6521-4a96-8324-cf79f32ca135-apiservice-cert\") pod \"metallb-operator-webhook-server-5576b55846-vxnvw\" (UID: \"e7043cbc-6521-4a96-8324-cf79f32ca135\") " pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.766973 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvgh6\" (UniqueName: \"kubernetes.io/projected/e7043cbc-6521-4a96-8324-cf79f32ca135-kube-api-access-zvgh6\") pod \"metallb-operator-webhook-server-5576b55846-vxnvw\" (UID: \"e7043cbc-6521-4a96-8324-cf79f32ca135\") " pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.816689 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b"] Dec 10 07:00:56 crc kubenswrapper[4765]: W1210 07:00:56.829947 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod165a5bae_361a_45b3_8a95_9b121b537acb.slice/crio-3ca87579a5d9baa6ffba834ebb2a756707dd5465269ccf21e0d6df234499b992 WatchSource:0}: Error finding container 3ca87579a5d9baa6ffba834ebb2a756707dd5465269ccf21e0d6df234499b992: Status 404 returned error can't find the container with id 3ca87579a5d9baa6ffba834ebb2a756707dd5465269ccf21e0d6df234499b992 Dec 10 07:00:56 crc kubenswrapper[4765]: I1210 07:00:56.924651 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:00:57 crc kubenswrapper[4765]: I1210 07:00:57.230302 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw"] Dec 10 07:00:57 crc kubenswrapper[4765]: W1210 07:00:57.248078 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7043cbc_6521_4a96_8324_cf79f32ca135.slice/crio-e8947c92c30479233cc2a657fd21bb6a5ebb52b79f84f0d74c58b511a3a03f9a WatchSource:0}: Error finding container e8947c92c30479233cc2a657fd21bb6a5ebb52b79f84f0d74c58b511a3a03f9a: Status 404 returned error can't find the container with id e8947c92c30479233cc2a657fd21bb6a5ebb52b79f84f0d74c58b511a3a03f9a Dec 10 07:00:57 crc kubenswrapper[4765]: I1210 07:00:57.365346 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" event={"ID":"165a5bae-361a-45b3-8a95-9b121b537acb","Type":"ContainerStarted","Data":"3ca87579a5d9baa6ffba834ebb2a756707dd5465269ccf21e0d6df234499b992"} Dec 10 07:00:57 crc kubenswrapper[4765]: I1210 07:00:57.366418 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" event={"ID":"e7043cbc-6521-4a96-8324-cf79f32ca135","Type":"ContainerStarted","Data":"e8947c92c30479233cc2a657fd21bb6a5ebb52b79f84f0d74c58b511a3a03f9a"} Dec 10 07:00:58 crc kubenswrapper[4765]: I1210 07:00:58.990957 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:58 crc kubenswrapper[4765]: I1210 07:00:58.991035 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:59 crc kubenswrapper[4765]: I1210 07:00:59.046111 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:00:59 crc kubenswrapper[4765]: I1210 07:00:59.457206 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:01:02 crc kubenswrapper[4765]: I1210 07:01:02.655250 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mks7d"] Dec 10 07:01:02 crc kubenswrapper[4765]: I1210 07:01:02.655890 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mks7d" podUID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerName="registry-server" containerID="cri-o://dc0c10de3ab52d39a593be310ed7e3119bce5b3569d434c6e57f26dc335b356b" gracePeriod=2 Dec 10 07:01:03 crc kubenswrapper[4765]: I1210 07:01:03.475569 4765 generic.go:334] "Generic (PLEG): container finished" podID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerID="dc0c10de3ab52d39a593be310ed7e3119bce5b3569d434c6e57f26dc335b356b" exitCode=0 Dec 10 07:01:03 crc kubenswrapper[4765]: I1210 07:01:03.475623 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mks7d" event={"ID":"e701c133-bdb9-4d2c-975c-c08ac7f8205b","Type":"ContainerDied","Data":"dc0c10de3ab52d39a593be310ed7e3119bce5b3569d434c6e57f26dc335b356b"} Dec 10 07:01:04 crc kubenswrapper[4765]: I1210 07:01:04.050856 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:01:04 crc kubenswrapper[4765]: I1210 07:01:04.051236 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:01:04 crc kubenswrapper[4765]: I1210 07:01:04.051284 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 07:01:04 crc kubenswrapper[4765]: I1210 07:01:04.052026 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c59ae59e8df6d4b5e877dd0073edd70b486d51e68aa6826855c2bd6ec77d4d30"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 07:01:04 crc kubenswrapper[4765]: I1210 07:01:04.052096 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://c59ae59e8df6d4b5e877dd0073edd70b486d51e68aa6826855c2bd6ec77d4d30" gracePeriod=600 Dec 10 07:01:04 crc kubenswrapper[4765]: I1210 07:01:04.590869 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="c59ae59e8df6d4b5e877dd0073edd70b486d51e68aa6826855c2bd6ec77d4d30" exitCode=0 Dec 10 07:01:04 crc kubenswrapper[4765]: I1210 07:01:04.600602 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"c59ae59e8df6d4b5e877dd0073edd70b486d51e68aa6826855c2bd6ec77d4d30"} Dec 10 07:01:04 crc kubenswrapper[4765]: I1210 07:01:04.600839 4765 scope.go:117] "RemoveContainer" containerID="d5e023799d44ef5db29f950e818bd172f5ecb1b760503fe8e90e8ba03b4e7b9e" Dec 10 07:01:06 crc kubenswrapper[4765]: I1210 07:01:06.984825 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.207435 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-catalog-content\") pod \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.207489 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmjcm\" (UniqueName: \"kubernetes.io/projected/e701c133-bdb9-4d2c-975c-c08ac7f8205b-kube-api-access-qmjcm\") pod \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.207561 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-utilities\") pod \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\" (UID: \"e701c133-bdb9-4d2c-975c-c08ac7f8205b\") " Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.208726 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-utilities" (OuterVolumeSpecName: "utilities") pod "e701c133-bdb9-4d2c-975c-c08ac7f8205b" (UID: "e701c133-bdb9-4d2c-975c-c08ac7f8205b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.218025 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e701c133-bdb9-4d2c-975c-c08ac7f8205b-kube-api-access-qmjcm" (OuterVolumeSpecName: "kube-api-access-qmjcm") pod "e701c133-bdb9-4d2c-975c-c08ac7f8205b" (UID: "e701c133-bdb9-4d2c-975c-c08ac7f8205b"). InnerVolumeSpecName "kube-api-access-qmjcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.308748 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmjcm\" (UniqueName: \"kubernetes.io/projected/e701c133-bdb9-4d2c-975c-c08ac7f8205b-kube-api-access-qmjcm\") on node \"crc\" DevicePath \"\"" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.308792 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.369690 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e701c133-bdb9-4d2c-975c-c08ac7f8205b" (UID: "e701c133-bdb9-4d2c-975c-c08ac7f8205b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.409757 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e701c133-bdb9-4d2c-975c-c08ac7f8205b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.608563 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" event={"ID":"165a5bae-361a-45b3-8a95-9b121b537acb","Type":"ContainerStarted","Data":"261b28fd52572bd4819d112dfb8ae57b7eb35d166aa488b0960494c00c806000"} Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.609075 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.610547 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mks7d" event={"ID":"e701c133-bdb9-4d2c-975c-c08ac7f8205b","Type":"ContainerDied","Data":"3e17a4e6efb223c7870bd2028351886b6c17fd238753d3cb061c019add11b600"} Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.610584 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mks7d" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.610618 4765 scope.go:117] "RemoveContainer" containerID="dc0c10de3ab52d39a593be310ed7e3119bce5b3569d434c6e57f26dc335b356b" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.613909 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"1a2948aa41622b94e272b106fd506ab6099b9c866ae8f86fff9e5dbf9e54046e"} Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.615721 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" event={"ID":"e7043cbc-6521-4a96-8324-cf79f32ca135","Type":"ContainerStarted","Data":"3f8edf66770deefea6e6a75df21bc641a83c29173989975a14869ae267010db0"} Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.615847 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.629836 4765 scope.go:117] "RemoveContainer" containerID="5d8551c190c0369c5442079bc25b3c3247547920e5a8b757ff6296dbebba21a2" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.634941 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" podStartSLOduration=2.705566522 podStartE2EDuration="12.634889787s" podCreationTimestamp="2025-12-10 07:00:55 +0000 UTC" firstStartedPulling="2025-12-10 07:00:56.832710359 +0000 UTC m=+776.559375675" lastFinishedPulling="2025-12-10 07:01:06.762033624 +0000 UTC m=+786.488698940" observedRunningTime="2025-12-10 07:01:07.630578404 +0000 UTC m=+787.357243720" watchObservedRunningTime="2025-12-10 07:01:07.634889787 +0000 UTC m=+787.361555103" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.658307 4765 scope.go:117] "RemoveContainer" containerID="931b20f18e808d8fafc840de934708f2d5d320059735ffbcdfd55c033d93e97f" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.682237 4765 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" podStartSLOduration=2.1736630359999998 podStartE2EDuration="11.682215426s" podCreationTimestamp="2025-12-10 07:00:56 +0000 UTC" firstStartedPulling="2025-12-10 07:00:57.250787928 +0000 UTC m=+776.977453234" lastFinishedPulling="2025-12-10 07:01:06.759340308 +0000 UTC m=+786.486005624" observedRunningTime="2025-12-10 07:01:07.681183597 +0000 UTC m=+787.407848913" watchObservedRunningTime="2025-12-10 07:01:07.682215426 +0000 UTC m=+787.408880742" Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.697611 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mks7d"] Dec 10 07:01:07 crc kubenswrapper[4765]: I1210 07:01:07.700732 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mks7d"] Dec 10 07:01:08 crc kubenswrapper[4765]: I1210 07:01:08.597441 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" path="/var/lib/kubelet/pods/e701c133-bdb9-4d2c-975c-c08ac7f8205b/volumes" Dec 10 07:01:16 crc kubenswrapper[4765]: I1210 07:01:16.930871 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5576b55846-vxnvw" Dec 10 07:01:45 crc kubenswrapper[4765]: I1210 07:01:45.901776 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-b9b79bb5f-zds8b" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.692064 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh"] Dec 10 07:01:46 crc kubenswrapper[4765]: E1210 07:01:46.692365 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerName="extract-content" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.692385 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerName="extract-content" Dec 10 07:01:46 crc kubenswrapper[4765]: E1210 07:01:46.692396 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerName="extract-utilities" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.692404 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerName="extract-utilities" Dec 10 07:01:46 crc kubenswrapper[4765]: E1210 07:01:46.692424 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerName="registry-server" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.692432 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerName="registry-server" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.692562 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e701c133-bdb9-4d2c-975c-c08ac7f8205b" containerName="registry-server" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.693044 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.695285 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.696877 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-k8jnb"] Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.697542 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-8tgst" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.699609 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.704259 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.704265 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.706614 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh"] Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.731386 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-frr-conf\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.731436 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m97b\" (UniqueName: \"kubernetes.io/projected/85f0f1bc-04a6-497e-8781-6be917b5be98-kube-api-access-9m97b\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.731472 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85f0f1bc-04a6-497e-8781-6be917b5be98-metrics-certs\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.731589 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-frr-sockets\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.731654 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqgf7\" (UniqueName: \"kubernetes.io/projected/d9898042-3b0b-4505-bb28-d9baf6939ee1-kube-api-access-zqgf7\") pod \"frr-k8s-webhook-server-7fcb986d4-p2jnh\" (UID: \"d9898042-3b0b-4505-bb28-d9baf6939ee1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.731682 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-reloader\") pod \"frr-k8s-k8jnb\" (UID: 
\"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.731751 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d9898042-3b0b-4505-bb28-d9baf6939ee1-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-p2jnh\" (UID: \"d9898042-3b0b-4505-bb28-d9baf6939ee1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.731797 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-metrics\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.731871 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/85f0f1bc-04a6-497e-8781-6be917b5be98-frr-startup\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.799160 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-kw6d9"] Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.800240 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.804784 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.805023 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.806700 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-qnnrf" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.808316 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.822172 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-c7l82"] Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.823045 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.824853 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.832745 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-frr-conf\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.832786 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kthx6\" (UniqueName: \"kubernetes.io/projected/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-kube-api-access-kthx6\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.832814 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m97b\" (UniqueName: \"kubernetes.io/projected/85f0f1bc-04a6-497e-8781-6be917b5be98-kube-api-access-9m97b\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.832847 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85f0f1bc-04a6-497e-8781-6be917b5be98-metrics-certs\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: E1210 07:01:46.832948 4765 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.832988 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-frr-sockets\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: E1210 07:01:46.833005 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85f0f1bc-04a6-497e-8781-6be917b5be98-metrics-certs podName:85f0f1bc-04a6-497e-8781-6be917b5be98 nodeName:}" failed. No retries permitted until 2025-12-10 07:01:47.332987866 +0000 UTC m=+827.059653182 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/85f0f1bc-04a6-497e-8781-6be917b5be98-metrics-certs") pod "frr-k8s-k8jnb" (UID: "85f0f1bc-04a6-497e-8781-6be917b5be98") : secret "frr-k8s-certs-secret" not found Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833058 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqgf7\" (UniqueName: \"kubernetes.io/projected/d9898042-3b0b-4505-bb28-d9baf6939ee1-kube-api-access-zqgf7\") pod \"frr-k8s-webhook-server-7fcb986d4-p2jnh\" (UID: \"d9898042-3b0b-4505-bb28-d9baf6939ee1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833099 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-reloader\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833131 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-metrics-certs\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833188 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d9898042-3b0b-4505-bb28-d9baf6939ee1-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-p2jnh\" (UID: \"d9898042-3b0b-4505-bb28-d9baf6939ee1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833224 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-metrics\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833235 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-frr-conf\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833245 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-metallb-excludel2\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: E1210 07:01:46.833346 4765 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833356 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-memberlist\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: E1210 07:01:46.833395 4765 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/secret/d9898042-3b0b-4505-bb28-d9baf6939ee1-cert podName:d9898042-3b0b-4505-bb28-d9baf6939ee1 nodeName:}" failed. No retries permitted until 2025-12-10 07:01:47.333380947 +0000 UTC m=+827.060046263 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d9898042-3b0b-4505-bb28-d9baf6939ee1-cert") pod "frr-k8s-webhook-server-7fcb986d4-p2jnh" (UID: "d9898042-3b0b-4505-bb28-d9baf6939ee1") : secret "frr-k8s-webhook-server-cert" not found Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833461 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/85f0f1bc-04a6-497e-8781-6be917b5be98-frr-startup\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833608 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-frr-sockets\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833699 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-metrics\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.833700 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/85f0f1bc-04a6-497e-8781-6be917b5be98-reloader\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.834514 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/85f0f1bc-04a6-497e-8781-6be917b5be98-frr-startup\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.847940 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-c7l82"] Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.861914 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqgf7\" (UniqueName: \"kubernetes.io/projected/d9898042-3b0b-4505-bb28-d9baf6939ee1-kube-api-access-zqgf7\") pod \"frr-k8s-webhook-server-7fcb986d4-p2jnh\" (UID: \"d9898042-3b0b-4505-bb28-d9baf6939ee1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.874822 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m97b\" (UniqueName: \"kubernetes.io/projected/85f0f1bc-04a6-497e-8781-6be917b5be98-kube-api-access-9m97b\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.934789 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-metrics-certs\") pod \"speaker-kw6d9\" (UID: 
\"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.935195 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e24b600b-dc29-47be-9e85-da8a5ac84860-metrics-certs\") pod \"controller-f8648f98b-c7l82\" (UID: \"e24b600b-dc29-47be-9e85-da8a5ac84860\") " pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.935240 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-metallb-excludel2\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.935258 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-memberlist\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.935296 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkfgh\" (UniqueName: \"kubernetes.io/projected/e24b600b-dc29-47be-9e85-da8a5ac84860-kube-api-access-qkfgh\") pod \"controller-f8648f98b-c7l82\" (UID: \"e24b600b-dc29-47be-9e85-da8a5ac84860\") " pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.935320 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kthx6\" (UniqueName: \"kubernetes.io/projected/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-kube-api-access-kthx6\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.935341 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e24b600b-dc29-47be-9e85-da8a5ac84860-cert\") pod \"controller-f8648f98b-c7l82\" (UID: \"e24b600b-dc29-47be-9e85-da8a5ac84860\") " pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:46 crc kubenswrapper[4765]: E1210 07:01:46.935399 4765 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 10 07:01:46 crc kubenswrapper[4765]: E1210 07:01:46.935469 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-memberlist podName:31d5b3e4-ecf0-4eb3-a434-5a27643f60c6 nodeName:}" failed. No retries permitted until 2025-12-10 07:01:47.435451498 +0000 UTC m=+827.162116804 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-memberlist") pod "speaker-kw6d9" (UID: "31d5b3e4-ecf0-4eb3-a434-5a27643f60c6") : secret "metallb-memberlist" not found Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.936100 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-metallb-excludel2\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.942978 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-metrics-certs\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:46 crc kubenswrapper[4765]: I1210 07:01:46.953630 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kthx6\" (UniqueName: \"kubernetes.io/projected/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-kube-api-access-kthx6\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.036660 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e24b600b-dc29-47be-9e85-da8a5ac84860-metrics-certs\") pod \"controller-f8648f98b-c7l82\" (UID: \"e24b600b-dc29-47be-9e85-da8a5ac84860\") " pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.036780 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkfgh\" (UniqueName: \"kubernetes.io/projected/e24b600b-dc29-47be-9e85-da8a5ac84860-kube-api-access-qkfgh\") pod \"controller-f8648f98b-c7l82\" (UID: \"e24b600b-dc29-47be-9e85-da8a5ac84860\") " pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.036819 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e24b600b-dc29-47be-9e85-da8a5ac84860-cert\") pod \"controller-f8648f98b-c7l82\" (UID: \"e24b600b-dc29-47be-9e85-da8a5ac84860\") " pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.038391 4765 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.039998 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e24b600b-dc29-47be-9e85-da8a5ac84860-metrics-certs\") pod \"controller-f8648f98b-c7l82\" (UID: \"e24b600b-dc29-47be-9e85-da8a5ac84860\") " pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.049798 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e24b600b-dc29-47be-9e85-da8a5ac84860-cert\") pod \"controller-f8648f98b-c7l82\" (UID: \"e24b600b-dc29-47be-9e85-da8a5ac84860\") " pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.053484 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkfgh\" 
(UniqueName: \"kubernetes.io/projected/e24b600b-dc29-47be-9e85-da8a5ac84860-kube-api-access-qkfgh\") pod \"controller-f8648f98b-c7l82\" (UID: \"e24b600b-dc29-47be-9e85-da8a5ac84860\") " pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.139818 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.340976 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85f0f1bc-04a6-497e-8781-6be917b5be98-metrics-certs\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.341074 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d9898042-3b0b-4505-bb28-d9baf6939ee1-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-p2jnh\" (UID: \"d9898042-3b0b-4505-bb28-d9baf6939ee1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.345160 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85f0f1bc-04a6-497e-8781-6be917b5be98-metrics-certs\") pod \"frr-k8s-k8jnb\" (UID: \"85f0f1bc-04a6-497e-8781-6be917b5be98\") " pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.345258 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d9898042-3b0b-4505-bb28-d9baf6939ee1-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-p2jnh\" (UID: \"d9898042-3b0b-4505-bb28-d9baf6939ee1\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.345304 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-c7l82"] Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.442238 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-memberlist\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:47 crc kubenswrapper[4765]: E1210 07:01:47.442374 4765 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 10 07:01:47 crc kubenswrapper[4765]: E1210 07:01:47.442571 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-memberlist podName:31d5b3e4-ecf0-4eb3-a434-5a27643f60c6 nodeName:}" failed. No retries permitted until 2025-12-10 07:01:48.442556239 +0000 UTC m=+828.169221555 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-memberlist") pod "speaker-kw6d9" (UID: "31d5b3e4-ecf0-4eb3-a434-5a27643f60c6") : secret "metallb-memberlist" not found Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.613071 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.625408 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:47 crc kubenswrapper[4765]: I1210 07:01:47.829039 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-c7l82" event={"ID":"e24b600b-dc29-47be-9e85-da8a5ac84860","Type":"ContainerStarted","Data":"6945384ca1a89f0ed700db4c2d4fffc8c0ad460c42cb641d94c692b18280aed4"} Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.029948 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh"] Dec 10 07:01:48 crc kubenswrapper[4765]: W1210 07:01:48.038646 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9898042_3b0b_4505_bb28_d9baf6939ee1.slice/crio-a9d1fe6061ed43c0dc0fa18c5f616897d6dfe3e6476a05f02f4378ce5684a401 WatchSource:0}: Error finding container a9d1fe6061ed43c0dc0fa18c5f616897d6dfe3e6476a05f02f4378ce5684a401: Status 404 returned error can't find the container with id a9d1fe6061ed43c0dc0fa18c5f616897d6dfe3e6476a05f02f4378ce5684a401 Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.469644 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-memberlist\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.476119 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31d5b3e4-ecf0-4eb3-a434-5a27643f60c6-memberlist\") pod \"speaker-kw6d9\" (UID: \"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6\") " pod="metallb-system/speaker-kw6d9" Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.616383 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-kw6d9" Dec 10 07:01:48 crc kubenswrapper[4765]: W1210 07:01:48.639894 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31d5b3e4_ecf0_4eb3_a434_5a27643f60c6.slice/crio-1f3e43ed790f592b413ed5ea87e80c1dd7b718f007ec88319606a843bd60ac80 WatchSource:0}: Error finding container 1f3e43ed790f592b413ed5ea87e80c1dd7b718f007ec88319606a843bd60ac80: Status 404 returned error can't find the container with id 1f3e43ed790f592b413ed5ea87e80c1dd7b718f007ec88319606a843bd60ac80 Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.840472 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-c7l82" event={"ID":"e24b600b-dc29-47be-9e85-da8a5ac84860","Type":"ContainerStarted","Data":"4830b6fa28adf9fe9242eaaa433cf5025e0476c10fbf137eef3f8630867ce768"} Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.840537 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-c7l82" event={"ID":"e24b600b-dc29-47be-9e85-da8a5ac84860","Type":"ContainerStarted","Data":"756ffabf39c0bbfa3bca8c143f6babba4168983c546235b1c175539c25b7594b"} Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.841287 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.842593 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kw6d9" event={"ID":"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6","Type":"ContainerStarted","Data":"1f3e43ed790f592b413ed5ea87e80c1dd7b718f007ec88319606a843bd60ac80"} Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.843824 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" event={"ID":"d9898042-3b0b-4505-bb28-d9baf6939ee1","Type":"ContainerStarted","Data":"a9d1fe6061ed43c0dc0fa18c5f616897d6dfe3e6476a05f02f4378ce5684a401"} Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.848444 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k8jnb" event={"ID":"85f0f1bc-04a6-497e-8781-6be917b5be98","Type":"ContainerStarted","Data":"946cffd759f01eb9cd7d1cb4cea598a7ac44c519a4d30c676167f93d0536a943"} Dec 10 07:01:48 crc kubenswrapper[4765]: I1210 07:01:48.874946 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-c7l82" podStartSLOduration=2.874931347 podStartE2EDuration="2.874931347s" podCreationTimestamp="2025-12-10 07:01:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:01:48.87361213 +0000 UTC m=+828.600277446" watchObservedRunningTime="2025-12-10 07:01:48.874931347 +0000 UTC m=+828.601596663" Dec 10 07:01:49 crc kubenswrapper[4765]: I1210 07:01:49.857134 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kw6d9" event={"ID":"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6","Type":"ContainerStarted","Data":"89145b75672b9b27f71cc760fca8ce6b649a675bed62d89f4e87a3eacfd91ab2"} Dec 10 07:01:49 crc kubenswrapper[4765]: I1210 07:01:49.857241 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kw6d9" event={"ID":"31d5b3e4-ecf0-4eb3-a434-5a27643f60c6","Type":"ContainerStarted","Data":"53ee4c607bb437c90d424f0cbcf02016e44ca97812454d3a3ba52a815fb00ff7"} Dec 10 07:01:49 crc 
kubenswrapper[4765]: I1210 07:01:49.857402 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-kw6d9" Dec 10 07:01:49 crc kubenswrapper[4765]: I1210 07:01:49.906847 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-kw6d9" podStartSLOduration=3.906829565 podStartE2EDuration="3.906829565s" podCreationTimestamp="2025-12-10 07:01:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:01:49.899454195 +0000 UTC m=+829.626119521" watchObservedRunningTime="2025-12-10 07:01:49.906829565 +0000 UTC m=+829.633494881" Dec 10 07:01:56 crc kubenswrapper[4765]: I1210 07:01:56.141760 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" event={"ID":"d9898042-3b0b-4505-bb28-d9baf6939ee1","Type":"ContainerStarted","Data":"3d84028914d10e061cd7a994ecb6effd57bd1bbdb3bbf77481ab48b26a1480a2"} Dec 10 07:01:56 crc kubenswrapper[4765]: I1210 07:01:56.142712 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:01:56 crc kubenswrapper[4765]: I1210 07:01:56.143002 4765 generic.go:334] "Generic (PLEG): container finished" podID="85f0f1bc-04a6-497e-8781-6be917b5be98" containerID="bea2dffede79d2241850062113198ed2c5a5f093810f5d11e41d1bd3ab59c34c" exitCode=0 Dec 10 07:01:56 crc kubenswrapper[4765]: I1210 07:01:56.143044 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k8jnb" event={"ID":"85f0f1bc-04a6-497e-8781-6be917b5be98","Type":"ContainerDied","Data":"bea2dffede79d2241850062113198ed2c5a5f093810f5d11e41d1bd3ab59c34c"} Dec 10 07:01:56 crc kubenswrapper[4765]: I1210 07:01:56.159334 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" podStartSLOduration=2.43779534 podStartE2EDuration="10.159314961s" podCreationTimestamp="2025-12-10 07:01:46 +0000 UTC" firstStartedPulling="2025-12-10 07:01:48.040734897 +0000 UTC m=+827.767400213" lastFinishedPulling="2025-12-10 07:01:55.762254518 +0000 UTC m=+835.488919834" observedRunningTime="2025-12-10 07:01:56.157896941 +0000 UTC m=+835.884562257" watchObservedRunningTime="2025-12-10 07:01:56.159314961 +0000 UTC m=+835.885980277" Dec 10 07:01:57 crc kubenswrapper[4765]: I1210 07:01:57.144998 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-c7l82" Dec 10 07:01:57 crc kubenswrapper[4765]: I1210 07:01:57.151118 4765 generic.go:334] "Generic (PLEG): container finished" podID="85f0f1bc-04a6-497e-8781-6be917b5be98" containerID="4853a848251a0e93926ffa8ed338919230480abae33bd20f03eb9b16e785ef83" exitCode=0 Dec 10 07:01:57 crc kubenswrapper[4765]: I1210 07:01:57.151170 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k8jnb" event={"ID":"85f0f1bc-04a6-497e-8781-6be917b5be98","Type":"ContainerDied","Data":"4853a848251a0e93926ffa8ed338919230480abae33bd20f03eb9b16e785ef83"} Dec 10 07:01:58 crc kubenswrapper[4765]: I1210 07:01:58.159467 4765 generic.go:334] "Generic (PLEG): container finished" podID="85f0f1bc-04a6-497e-8781-6be917b5be98" containerID="b6f00056009395ac11caf97e36e1d20564834c9b1f964a637ed32a6e12dad430" exitCode=0 Dec 10 07:01:58 crc kubenswrapper[4765]: I1210 07:01:58.159521 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-k8jnb" event={"ID":"85f0f1bc-04a6-497e-8781-6be917b5be98","Type":"ContainerDied","Data":"b6f00056009395ac11caf97e36e1d20564834c9b1f964a637ed32a6e12dad430"} Dec 10 07:01:58 crc kubenswrapper[4765]: I1210 07:01:58.619804 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-kw6d9" Dec 10 07:01:59 crc kubenswrapper[4765]: I1210 07:01:59.166857 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k8jnb" event={"ID":"85f0f1bc-04a6-497e-8781-6be917b5be98","Type":"ContainerStarted","Data":"2a466e334ffb12426aaa1812a9772c84e4580db06ff3b620e2c146a5badc06f4"} Dec 10 07:01:59 crc kubenswrapper[4765]: I1210 07:01:59.166894 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k8jnb" event={"ID":"85f0f1bc-04a6-497e-8781-6be917b5be98","Type":"ContainerStarted","Data":"d2639abc4c1bc05e593815dc8fdc1f0460a0b76fa76460da39204f082f3f1772"} Dec 10 07:01:59 crc kubenswrapper[4765]: I1210 07:01:59.166903 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k8jnb" event={"ID":"85f0f1bc-04a6-497e-8781-6be917b5be98","Type":"ContainerStarted","Data":"f3efa9470a7e19d08f738231555213a6f2bcc8ef9d463fe865a8195a4c4dcb37"} Dec 10 07:01:59 crc kubenswrapper[4765]: I1210 07:01:59.166911 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k8jnb" event={"ID":"85f0f1bc-04a6-497e-8781-6be917b5be98","Type":"ContainerStarted","Data":"b6422b86aa0158e2079c0a337af69c0f91389984985e491362376ec1838b92e2"} Dec 10 07:01:59 crc kubenswrapper[4765]: I1210 07:01:59.166920 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k8jnb" event={"ID":"85f0f1bc-04a6-497e-8781-6be917b5be98","Type":"ContainerStarted","Data":"6b54f86f747cfc5abea476fe267592f6956322d8afbc3969027b1a02e9c2119c"} Dec 10 07:01:59 crc kubenswrapper[4765]: I1210 07:01:59.166927 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k8jnb" event={"ID":"85f0f1bc-04a6-497e-8781-6be917b5be98","Type":"ContainerStarted","Data":"476cddd4ad15f2f283dac4f8ac5262883c6bc6705f38117335e98590e02eeaee"} Dec 10 07:01:59 crc kubenswrapper[4765]: I1210 07:01:59.168993 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:01:59 crc kubenswrapper[4765]: I1210 07:01:59.198384 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-k8jnb" podStartSLOduration=5.366674096 podStartE2EDuration="13.198369418s" podCreationTimestamp="2025-12-10 07:01:46 +0000 UTC" firstStartedPulling="2025-12-10 07:01:47.912787769 +0000 UTC m=+827.639453085" lastFinishedPulling="2025-12-10 07:01:55.744483091 +0000 UTC m=+835.471148407" observedRunningTime="2025-12-10 07:01:59.192933003 +0000 UTC m=+838.919598329" watchObservedRunningTime="2025-12-10 07:01:59.198369418 +0000 UTC m=+838.925034734" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.121353 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s"] Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.123187 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.125837 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.134174 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s"] Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.324244 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmznv\" (UniqueName: \"kubernetes.io/projected/3c596d4a-2d7a-4297-834e-183a6a272ec5-kube-api-access-bmznv\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.324295 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.324511 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.425426 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmznv\" (UniqueName: \"kubernetes.io/projected/3c596d4a-2d7a-4297-834e-183a6a272ec5-kube-api-access-bmznv\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.425475 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.425540 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.425899 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.426154 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.442765 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmznv\" (UniqueName: \"kubernetes.io/projected/3c596d4a-2d7a-4297-834e-183a6a272ec5-kube-api-access-bmznv\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.743264 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 10 07:02:00 crc kubenswrapper[4765]: I1210 07:02:00.752166 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:01 crc kubenswrapper[4765]: I1210 07:02:01.341412 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s"] Dec 10 07:02:01 crc kubenswrapper[4765]: W1210 07:02:01.352909 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c596d4a_2d7a_4297_834e_183a6a272ec5.slice/crio-c2b5b60bd64898c9c9983a681d6b3b17780af0f9e7a15ad7239d1b73c88346aa WatchSource:0}: Error finding container c2b5b60bd64898c9c9983a681d6b3b17780af0f9e7a15ad7239d1b73c88346aa: Status 404 returned error can't find the container with id c2b5b60bd64898c9c9983a681d6b3b17780af0f9e7a15ad7239d1b73c88346aa Dec 10 07:02:02 crc kubenswrapper[4765]: I1210 07:02:02.190027 4765 generic.go:334] "Generic (PLEG): container finished" podID="3c596d4a-2d7a-4297-834e-183a6a272ec5" containerID="7dd018c44eee8ca6df7f97a510dc2e0e17bbf50f8c9539ee0a7e0faaac1f9e4f" exitCode=0 Dec 10 07:02:02 crc kubenswrapper[4765]: I1210 07:02:02.190077 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" event={"ID":"3c596d4a-2d7a-4297-834e-183a6a272ec5","Type":"ContainerDied","Data":"7dd018c44eee8ca6df7f97a510dc2e0e17bbf50f8c9539ee0a7e0faaac1f9e4f"} Dec 10 07:02:02 crc kubenswrapper[4765]: I1210 07:02:02.190141 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" event={"ID":"3c596d4a-2d7a-4297-834e-183a6a272ec5","Type":"ContainerStarted","Data":"c2b5b60bd64898c9c9983a681d6b3b17780af0f9e7a15ad7239d1b73c88346aa"} Dec 10 07:02:02 crc kubenswrapper[4765]: I1210 07:02:02.626920 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:02:02 crc 
kubenswrapper[4765]: I1210 07:02:02.668623 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:02:06 crc kubenswrapper[4765]: I1210 07:02:06.332643 4765 generic.go:334] "Generic (PLEG): container finished" podID="3c596d4a-2d7a-4297-834e-183a6a272ec5" containerID="c94b58cf9656b21fed58aace9ec590310544dcee86fd000f51a2d05cc5fe21d7" exitCode=0 Dec 10 07:02:06 crc kubenswrapper[4765]: I1210 07:02:06.332703 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" event={"ID":"3c596d4a-2d7a-4297-834e-183a6a272ec5","Type":"ContainerDied","Data":"c94b58cf9656b21fed58aace9ec590310544dcee86fd000f51a2d05cc5fe21d7"} Dec 10 07:02:07 crc kubenswrapper[4765]: I1210 07:02:07.341374 4765 generic.go:334] "Generic (PLEG): container finished" podID="3c596d4a-2d7a-4297-834e-183a6a272ec5" containerID="f57fbe1732f7a55623d75dbcc09ccb829cba5b49601a9b1b71073c33d8a115c0" exitCode=0 Dec 10 07:02:07 crc kubenswrapper[4765]: I1210 07:02:07.341451 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" event={"ID":"3c596d4a-2d7a-4297-834e-183a6a272ec5","Type":"ContainerDied","Data":"f57fbe1732f7a55623d75dbcc09ccb829cba5b49601a9b1b71073c33d8a115c0"} Dec 10 07:02:07 crc kubenswrapper[4765]: I1210 07:02:07.623760 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-p2jnh" Dec 10 07:02:08 crc kubenswrapper[4765]: I1210 07:02:08.590234 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:08 crc kubenswrapper[4765]: I1210 07:02:08.742899 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmznv\" (UniqueName: \"kubernetes.io/projected/3c596d4a-2d7a-4297-834e-183a6a272ec5-kube-api-access-bmznv\") pod \"3c596d4a-2d7a-4297-834e-183a6a272ec5\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " Dec 10 07:02:08 crc kubenswrapper[4765]: I1210 07:02:08.742960 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-util\") pod \"3c596d4a-2d7a-4297-834e-183a6a272ec5\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " Dec 10 07:02:08 crc kubenswrapper[4765]: I1210 07:02:08.743021 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-bundle\") pod \"3c596d4a-2d7a-4297-834e-183a6a272ec5\" (UID: \"3c596d4a-2d7a-4297-834e-183a6a272ec5\") " Dec 10 07:02:08 crc kubenswrapper[4765]: I1210 07:02:08.743941 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-bundle" (OuterVolumeSpecName: "bundle") pod "3c596d4a-2d7a-4297-834e-183a6a272ec5" (UID: "3c596d4a-2d7a-4297-834e-183a6a272ec5"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:02:08 crc kubenswrapper[4765]: I1210 07:02:08.748045 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c596d4a-2d7a-4297-834e-183a6a272ec5-kube-api-access-bmznv" (OuterVolumeSpecName: "kube-api-access-bmznv") pod "3c596d4a-2d7a-4297-834e-183a6a272ec5" (UID: "3c596d4a-2d7a-4297-834e-183a6a272ec5"). InnerVolumeSpecName "kube-api-access-bmznv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:02:08 crc kubenswrapper[4765]: I1210 07:02:08.752633 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-util" (OuterVolumeSpecName: "util") pod "3c596d4a-2d7a-4297-834e-183a6a272ec5" (UID: "3c596d4a-2d7a-4297-834e-183a6a272ec5"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:02:08 crc kubenswrapper[4765]: I1210 07:02:08.844257 4765 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:02:08 crc kubenswrapper[4765]: I1210 07:02:08.844303 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmznv\" (UniqueName: \"kubernetes.io/projected/3c596d4a-2d7a-4297-834e-183a6a272ec5-kube-api-access-bmznv\") on node \"crc\" DevicePath \"\"" Dec 10 07:02:08 crc kubenswrapper[4765]: I1210 07:02:08.844316 4765 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3c596d4a-2d7a-4297-834e-183a6a272ec5-util\") on node \"crc\" DevicePath \"\"" Dec 10 07:02:09 crc kubenswrapper[4765]: I1210 07:02:09.354294 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" event={"ID":"3c596d4a-2d7a-4297-834e-183a6a272ec5","Type":"ContainerDied","Data":"c2b5b60bd64898c9c9983a681d6b3b17780af0f9e7a15ad7239d1b73c88346aa"} Dec 10 07:02:09 crc kubenswrapper[4765]: I1210 07:02:09.354338 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2b5b60bd64898c9c9983a681d6b3b17780af0f9e7a15ad7239d1b73c88346aa" Dec 10 07:02:09 crc kubenswrapper[4765]: I1210 07:02:09.354407 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.093076 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9"] Dec 10 07:02:13 crc kubenswrapper[4765]: E1210 07:02:13.094093 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c596d4a-2d7a-4297-834e-183a6a272ec5" containerName="util" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.094109 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c596d4a-2d7a-4297-834e-183a6a272ec5" containerName="util" Dec 10 07:02:13 crc kubenswrapper[4765]: E1210 07:02:13.094130 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c596d4a-2d7a-4297-834e-183a6a272ec5" containerName="extract" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.094137 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c596d4a-2d7a-4297-834e-183a6a272ec5" containerName="extract" Dec 10 07:02:13 crc kubenswrapper[4765]: E1210 07:02:13.094153 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c596d4a-2d7a-4297-834e-183a6a272ec5" containerName="pull" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.094176 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c596d4a-2d7a-4297-834e-183a6a272ec5" containerName="pull" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.094301 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c596d4a-2d7a-4297-834e-183a6a272ec5" containerName="extract" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.094864 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.097117 4765 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-rxvws" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.097518 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.098633 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.113811 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9"] Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.247321 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-dhjz9\" (UID: \"33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.247383 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8z72\" (UniqueName: \"kubernetes.io/projected/33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb-kube-api-access-b8z72\") pod \"cert-manager-operator-controller-manager-64cf6dff88-dhjz9\" (UID: \"33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.348797 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-dhjz9\" (UID: \"33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.348860 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8z72\" (UniqueName: \"kubernetes.io/projected/33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb-kube-api-access-b8z72\") pod \"cert-manager-operator-controller-manager-64cf6dff88-dhjz9\" (UID: \"33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.349653 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-dhjz9\" (UID: \"33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.373965 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8z72\" (UniqueName: \"kubernetes.io/projected/33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb-kube-api-access-b8z72\") pod \"cert-manager-operator-controller-manager-64cf6dff88-dhjz9\" (UID: \"33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" Dec 10 07:02:13 crc kubenswrapper[4765]: I1210 07:02:13.418912 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" Dec 10 07:02:14 crc kubenswrapper[4765]: I1210 07:02:14.061901 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9"] Dec 10 07:02:14 crc kubenswrapper[4765]: W1210 07:02:14.068392 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33b73d0e_d9fd_4c65_b4c4_726fd8ce87eb.slice/crio-44c251a1968da2a4151c90e65155f7125050cbcf23659ad996cbc16ea8a0ddc4 WatchSource:0}: Error finding container 44c251a1968da2a4151c90e65155f7125050cbcf23659ad996cbc16ea8a0ddc4: Status 404 returned error can't find the container with id 44c251a1968da2a4151c90e65155f7125050cbcf23659ad996cbc16ea8a0ddc4 Dec 10 07:02:14 crc kubenswrapper[4765]: I1210 07:02:14.523277 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" event={"ID":"33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb","Type":"ContainerStarted","Data":"44c251a1968da2a4151c90e65155f7125050cbcf23659ad996cbc16ea8a0ddc4"} Dec 10 07:02:17 crc kubenswrapper[4765]: I1210 07:02:17.635748 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-k8jnb" Dec 10 07:02:27 crc kubenswrapper[4765]: I1210 07:02:27.814669 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" event={"ID":"33b73d0e-d9fd-4c65-b4c4-726fd8ce87eb","Type":"ContainerStarted","Data":"c2a82d5b2380eee2e1745a84de0cda59e895dd59dd94ab00a222f5e99a97a367"} Dec 10 07:02:27 crc kubenswrapper[4765]: I1210 07:02:27.835218 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-dhjz9" podStartSLOduration=1.412230789 podStartE2EDuration="14.835201043s" podCreationTimestamp="2025-12-10 07:02:13 +0000 UTC" firstStartedPulling="2025-12-10 07:02:14.070859814 +0000 UTC m=+853.797525130" lastFinishedPulling="2025-12-10 07:02:27.493830068 +0000 UTC m=+867.220495384" observedRunningTime="2025-12-10 07:02:27.833164815 +0000 UTC m=+867.559830161" watchObservedRunningTime="2025-12-10 07:02:27.835201043 +0000 UTC m=+867.561866359" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.425047 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-5jbtc"] Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.425932 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.427607 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.428048 4765 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-lf9g2" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.428940 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.435732 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-5jbtc"] Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.577122 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8659183f-8540-44c3-9040-f8b2490375cb-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-5jbtc\" (UID: \"8659183f-8540-44c3-9040-f8b2490375cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.577417 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glckz\" (UniqueName: \"kubernetes.io/projected/8659183f-8540-44c3-9040-f8b2490375cb-kube-api-access-glckz\") pod \"cert-manager-webhook-f4fb5df64-5jbtc\" (UID: \"8659183f-8540-44c3-9040-f8b2490375cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.678697 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8659183f-8540-44c3-9040-f8b2490375cb-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-5jbtc\" (UID: \"8659183f-8540-44c3-9040-f8b2490375cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.678769 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glckz\" (UniqueName: \"kubernetes.io/projected/8659183f-8540-44c3-9040-f8b2490375cb-kube-api-access-glckz\") pod \"cert-manager-webhook-f4fb5df64-5jbtc\" (UID: \"8659183f-8540-44c3-9040-f8b2490375cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.704297 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8659183f-8540-44c3-9040-f8b2490375cb-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-5jbtc\" (UID: \"8659183f-8540-44c3-9040-f8b2490375cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.704950 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glckz\" (UniqueName: \"kubernetes.io/projected/8659183f-8540-44c3-9040-f8b2490375cb-kube-api-access-glckz\") pod \"cert-manager-webhook-f4fb5df64-5jbtc\" (UID: \"8659183f-8540-44c3-9040-f8b2490375cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" Dec 10 07:02:30 crc kubenswrapper[4765]: I1210 07:02:30.803473 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" Dec 10 07:02:31 crc kubenswrapper[4765]: I1210 07:02:31.892655 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-5jbtc"] Dec 10 07:02:32 crc kubenswrapper[4765]: I1210 07:02:32.868950 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" event={"ID":"8659183f-8540-44c3-9040-f8b2490375cb","Type":"ContainerStarted","Data":"9d95590d4ac7e81dc09731ee9875248a1bbb5509eb3999fd3c321f125b591850"} Dec 10 07:02:32 crc kubenswrapper[4765]: I1210 07:02:32.991274 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8"] Dec 10 07:02:32 crc kubenswrapper[4765]: I1210 07:02:32.992364 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8" Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.002413 4765 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-8wtxc" Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.007987 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8"] Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.192328 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8g8x\" (UniqueName: \"kubernetes.io/projected/b6f56e46-2706-42ef-bfd1-249520fb4036-kube-api-access-q8g8x\") pod \"cert-manager-cainjector-855d9ccff4-8g9t8\" (UID: \"b6f56e46-2706-42ef-bfd1-249520fb4036\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8" Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.192521 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6f56e46-2706-42ef-bfd1-249520fb4036-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-8g9t8\" (UID: \"b6f56e46-2706-42ef-bfd1-249520fb4036\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8" Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.293914 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8g8x\" (UniqueName: \"kubernetes.io/projected/b6f56e46-2706-42ef-bfd1-249520fb4036-kube-api-access-q8g8x\") pod \"cert-manager-cainjector-855d9ccff4-8g9t8\" (UID: \"b6f56e46-2706-42ef-bfd1-249520fb4036\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8" Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.294389 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6f56e46-2706-42ef-bfd1-249520fb4036-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-8g9t8\" (UID: \"b6f56e46-2706-42ef-bfd1-249520fb4036\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8" Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.313338 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8g8x\" (UniqueName: \"kubernetes.io/projected/b6f56e46-2706-42ef-bfd1-249520fb4036-kube-api-access-q8g8x\") pod \"cert-manager-cainjector-855d9ccff4-8g9t8\" (UID: \"b6f56e46-2706-42ef-bfd1-249520fb4036\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8" Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.323889 4765 
Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.323889 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6f56e46-2706-42ef-bfd1-249520fb4036-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-8g9t8\" (UID: \"b6f56e46-2706-42ef-bfd1-249520fb4036\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8"
Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.623451 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8"
Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.855735 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8"]
Dec 10 07:02:33 crc kubenswrapper[4765]: I1210 07:02:33.880416 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8" event={"ID":"b6f56e46-2706-42ef-bfd1-249520fb4036","Type":"ContainerStarted","Data":"a30db6a7207786ee1112f472d4e398bdc0e109fff0b5660a652b284ae83cf35f"}
Dec 10 07:02:39 crc kubenswrapper[4765]: I1210 07:02:39.215543 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="metallb-system/frr-k8s-k8jnb" podUID="85f0f1bc-04a6-497e-8781-6be917b5be98" containerName="frr" probeResult="failure" output="Get \"http://127.0.0.1:7573/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.284590 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-6zdfw"]
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.286151 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-6zdfw"
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.289662 4765 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-tvwqb"
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.303368 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-6zdfw"]
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.376900 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2b749dd-e42d-417e-ae85-f66e905aba81-bound-sa-token\") pod \"cert-manager-86cb77c54b-6zdfw\" (UID: \"f2b749dd-e42d-417e-ae85-f66e905aba81\") " pod="cert-manager/cert-manager-86cb77c54b-6zdfw"
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.376993 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wvxl\" (UniqueName: \"kubernetes.io/projected/f2b749dd-e42d-417e-ae85-f66e905aba81-kube-api-access-6wvxl\") pod \"cert-manager-86cb77c54b-6zdfw\" (UID: \"f2b749dd-e42d-417e-ae85-f66e905aba81\") " pod="cert-manager/cert-manager-86cb77c54b-6zdfw"
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.477847 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wvxl\" (UniqueName: \"kubernetes.io/projected/f2b749dd-e42d-417e-ae85-f66e905aba81-kube-api-access-6wvxl\") pod \"cert-manager-86cb77c54b-6zdfw\" (UID: \"f2b749dd-e42d-417e-ae85-f66e905aba81\") " pod="cert-manager/cert-manager-86cb77c54b-6zdfw"
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.477926 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2b749dd-e42d-417e-ae85-f66e905aba81-bound-sa-token\") pod \"cert-manager-86cb77c54b-6zdfw\" (UID: \"f2b749dd-e42d-417e-ae85-f66e905aba81\") " pod="cert-manager/cert-manager-86cb77c54b-6zdfw"
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.495587 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2b749dd-e42d-417e-ae85-f66e905aba81-bound-sa-token\") pod \"cert-manager-86cb77c54b-6zdfw\" (UID: \"f2b749dd-e42d-417e-ae85-f66e905aba81\") " pod="cert-manager/cert-manager-86cb77c54b-6zdfw"
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.495730 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wvxl\" (UniqueName: \"kubernetes.io/projected/f2b749dd-e42d-417e-ae85-f66e905aba81-kube-api-access-6wvxl\") pod \"cert-manager-86cb77c54b-6zdfw\" (UID: \"f2b749dd-e42d-417e-ae85-f66e905aba81\") " pod="cert-manager/cert-manager-86cb77c54b-6zdfw"
Dec 10 07:02:41 crc kubenswrapper[4765]: I1210 07:02:41.667486 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-6zdfw"
Dec 10 07:02:44 crc kubenswrapper[4765]: I1210 07:02:44.255549 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8" event={"ID":"b6f56e46-2706-42ef-bfd1-249520fb4036","Type":"ContainerStarted","Data":"56ba9d80e8d9886f1087f34b9d748020b9c4d3d739582670ec56772c9ee8f0a6"}
Dec 10 07:02:44 crc kubenswrapper[4765]: I1210 07:02:44.257653 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" event={"ID":"8659183f-8540-44c3-9040-f8b2490375cb","Type":"ContainerStarted","Data":"88a70eb1da13a56f25b0d4282b18fd3670e029112642b466d4a29230f598458c"}
Dec 10 07:02:44 crc kubenswrapper[4765]: I1210 07:02:44.258308 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc"
Dec 10 07:02:44 crc kubenswrapper[4765]: I1210 07:02:44.303021 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-8g9t8" podStartSLOduration=2.200195931 podStartE2EDuration="12.302992362s" podCreationTimestamp="2025-12-10 07:02:32 +0000 UTC" firstStartedPulling="2025-12-10 07:02:33.86741126 +0000 UTC m=+873.594076576" lastFinishedPulling="2025-12-10 07:02:43.970207691 +0000 UTC m=+883.696873007" observedRunningTime="2025-12-10 07:02:44.273402618 +0000 UTC m=+884.000067934" watchObservedRunningTime="2025-12-10 07:02:44.302992362 +0000 UTC m=+884.029657678"
Dec 10 07:02:44 crc kubenswrapper[4765]: I1210 07:02:44.308009 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" podStartSLOduration=2.262359917 podStartE2EDuration="14.307988564s" podCreationTimestamp="2025-12-10 07:02:30 +0000 UTC" firstStartedPulling="2025-12-10 07:02:31.899721415 +0000 UTC m=+871.626386731" lastFinishedPulling="2025-12-10 07:02:43.945350062 +0000 UTC m=+883.672015378" observedRunningTime="2025-12-10 07:02:44.30152079 +0000 UTC m=+884.028186116" watchObservedRunningTime="2025-12-10 07:02:44.307988564 +0000 UTC m=+884.034653880"
Dec 10 07:02:44 crc kubenswrapper[4765]: I1210 07:02:44.374693 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-6zdfw"]
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-6zdfw" event={"ID":"f2b749dd-e42d-417e-ae85-f66e905aba81","Type":"ContainerStarted","Data":"259f716a83635def112af0f0da9032c0dff66216118ac822deb35a20856f5e28"} Dec 10 07:02:45 crc kubenswrapper[4765]: I1210 07:02:45.267530 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-6zdfw" event={"ID":"f2b749dd-e42d-417e-ae85-f66e905aba81","Type":"ContainerStarted","Data":"e92f006cf4f1eae92dcdbab6b4caef816b8dcefec485939232bbbad80593badb"} Dec 10 07:02:45 crc kubenswrapper[4765]: I1210 07:02:45.285681 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-6zdfw" podStartSLOduration=4.285665375 podStartE2EDuration="4.285665375s" podCreationTimestamp="2025-12-10 07:02:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:02:45.281220008 +0000 UTC m=+885.007885324" watchObservedRunningTime="2025-12-10 07:02:45.285665375 +0000 UTC m=+885.012330691" Dec 10 07:02:50 crc kubenswrapper[4765]: I1210 07:02:50.807557 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-5jbtc" Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.170941 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-q95xs"] Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.172762 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-q95xs" Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.175433 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.175589 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.176722 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-sc9k4" Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.218268 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-q95xs"] Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.369398 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2zzr\" (UniqueName: \"kubernetes.io/projected/79bccbed-d859-4312-933d-ab156425cb73-kube-api-access-q2zzr\") pod \"openstack-operator-index-q95xs\" (UID: \"79bccbed-d859-4312-933d-ab156425cb73\") " pod="openstack-operators/openstack-operator-index-q95xs" Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.471313 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2zzr\" (UniqueName: \"kubernetes.io/projected/79bccbed-d859-4312-933d-ab156425cb73-kube-api-access-q2zzr\") pod \"openstack-operator-index-q95xs\" (UID: \"79bccbed-d859-4312-933d-ab156425cb73\") " pod="openstack-operators/openstack-operator-index-q95xs" Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.492801 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2zzr\" (UniqueName: \"kubernetes.io/projected/79bccbed-d859-4312-933d-ab156425cb73-kube-api-access-q2zzr\") pod 
\"openstack-operator-index-q95xs\" (UID: \"79bccbed-d859-4312-933d-ab156425cb73\") " pod="openstack-operators/openstack-operator-index-q95xs" Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.527793 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-q95xs" Dec 10 07:02:54 crc kubenswrapper[4765]: I1210 07:02:54.980237 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-q95xs"] Dec 10 07:02:55 crc kubenswrapper[4765]: I1210 07:02:55.319479 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-q95xs" event={"ID":"79bccbed-d859-4312-933d-ab156425cb73","Type":"ContainerStarted","Data":"afdd06ba14926b4cbd1d0632926550a23d8febfa68737d69ce425915beb6ffde"} Dec 10 07:02:56 crc kubenswrapper[4765]: I1210 07:02:56.936710 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-q95xs"] Dec 10 07:02:57 crc kubenswrapper[4765]: I1210 07:02:57.542437 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-f8s54"] Dec 10 07:02:57 crc kubenswrapper[4765]: I1210 07:02:57.543357 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-f8s54" Dec 10 07:02:57 crc kubenswrapper[4765]: I1210 07:02:57.554353 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-f8s54"] Dec 10 07:02:57 crc kubenswrapper[4765]: I1210 07:02:57.611923 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f6tc\" (UniqueName: \"kubernetes.io/projected/23a1b29f-1c77-47e7-ab57-d0539cdbdec0-kube-api-access-4f6tc\") pod \"openstack-operator-index-f8s54\" (UID: \"23a1b29f-1c77-47e7-ab57-d0539cdbdec0\") " pod="openstack-operators/openstack-operator-index-f8s54" Dec 10 07:02:57 crc kubenswrapper[4765]: I1210 07:02:57.712981 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f6tc\" (UniqueName: \"kubernetes.io/projected/23a1b29f-1c77-47e7-ab57-d0539cdbdec0-kube-api-access-4f6tc\") pod \"openstack-operator-index-f8s54\" (UID: \"23a1b29f-1c77-47e7-ab57-d0539cdbdec0\") " pod="openstack-operators/openstack-operator-index-f8s54" Dec 10 07:02:57 crc kubenswrapper[4765]: I1210 07:02:57.740842 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f6tc\" (UniqueName: \"kubernetes.io/projected/23a1b29f-1c77-47e7-ab57-d0539cdbdec0-kube-api-access-4f6tc\") pod \"openstack-operator-index-f8s54\" (UID: \"23a1b29f-1c77-47e7-ab57-d0539cdbdec0\") " pod="openstack-operators/openstack-operator-index-f8s54" Dec 10 07:02:57 crc kubenswrapper[4765]: I1210 07:02:57.863597 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-f8s54" Dec 10 07:02:58 crc kubenswrapper[4765]: I1210 07:02:58.619935 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-f8s54"] Dec 10 07:02:59 crc kubenswrapper[4765]: I1210 07:02:59.346246 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f8s54" event={"ID":"23a1b29f-1c77-47e7-ab57-d0539cdbdec0","Type":"ContainerStarted","Data":"bb17e8c1f44a5d839ce9d0d318512612645f76f277a2bd1d9ab57c1892ae62ab"} Dec 10 07:03:01 crc kubenswrapper[4765]: I1210 07:03:01.363171 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f8s54" event={"ID":"23a1b29f-1c77-47e7-ab57-d0539cdbdec0","Type":"ContainerStarted","Data":"390f110046980659c212315d7adb34cc58edd5fbc5e2f1d3f942f2f741964f9a"} Dec 10 07:03:01 crc kubenswrapper[4765]: I1210 07:03:01.367383 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-q95xs" event={"ID":"79bccbed-d859-4312-933d-ab156425cb73","Type":"ContainerStarted","Data":"d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663"} Dec 10 07:03:01 crc kubenswrapper[4765]: I1210 07:03:01.367597 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-q95xs" podUID="79bccbed-d859-4312-933d-ab156425cb73" containerName="registry-server" containerID="cri-o://d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663" gracePeriod=2 Dec 10 07:03:01 crc kubenswrapper[4765]: I1210 07:03:01.394012 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-f8s54" podStartSLOduration=2.3757605059999998 podStartE2EDuration="4.393989401s" podCreationTimestamp="2025-12-10 07:02:57 +0000 UTC" firstStartedPulling="2025-12-10 07:02:58.62197677 +0000 UTC m=+898.348642106" lastFinishedPulling="2025-12-10 07:03:00.640205685 +0000 UTC m=+900.366871001" observedRunningTime="2025-12-10 07:03:01.388414752 +0000 UTC m=+901.115080088" watchObservedRunningTime="2025-12-10 07:03:01.393989401 +0000 UTC m=+901.120654717" Dec 10 07:03:01 crc kubenswrapper[4765]: I1210 07:03:01.406830 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-q95xs" podStartSLOduration=2.077827335 podStartE2EDuration="7.406810347s" podCreationTimestamp="2025-12-10 07:02:54 +0000 UTC" firstStartedPulling="2025-12-10 07:02:55.006604366 +0000 UTC m=+894.733269682" lastFinishedPulling="2025-12-10 07:03:00.335587368 +0000 UTC m=+900.062252694" observedRunningTime="2025-12-10 07:03:01.40165711 +0000 UTC m=+901.128322426" watchObservedRunningTime="2025-12-10 07:03:01.406810347 +0000 UTC m=+901.133475663" Dec 10 07:03:01 crc kubenswrapper[4765]: I1210 07:03:01.854955 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-q95xs" Dec 10 07:03:01 crc kubenswrapper[4765]: I1210 07:03:01.977891 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2zzr\" (UniqueName: \"kubernetes.io/projected/79bccbed-d859-4312-933d-ab156425cb73-kube-api-access-q2zzr\") pod \"79bccbed-d859-4312-933d-ab156425cb73\" (UID: \"79bccbed-d859-4312-933d-ab156425cb73\") " Dec 10 07:03:01 crc kubenswrapper[4765]: I1210 07:03:01.983228 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79bccbed-d859-4312-933d-ab156425cb73-kube-api-access-q2zzr" (OuterVolumeSpecName: "kube-api-access-q2zzr") pod "79bccbed-d859-4312-933d-ab156425cb73" (UID: "79bccbed-d859-4312-933d-ab156425cb73"). InnerVolumeSpecName "kube-api-access-q2zzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.079476 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2zzr\" (UniqueName: \"kubernetes.io/projected/79bccbed-d859-4312-933d-ab156425cb73-kube-api-access-q2zzr\") on node \"crc\" DevicePath \"\"" Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.373741 4765 generic.go:334] "Generic (PLEG): container finished" podID="79bccbed-d859-4312-933d-ab156425cb73" containerID="d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663" exitCode=0 Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.373793 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-q95xs" Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.373845 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-q95xs" event={"ID":"79bccbed-d859-4312-933d-ab156425cb73","Type":"ContainerDied","Data":"d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663"} Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.373886 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-q95xs" event={"ID":"79bccbed-d859-4312-933d-ab156425cb73","Type":"ContainerDied","Data":"afdd06ba14926b4cbd1d0632926550a23d8febfa68737d69ce425915beb6ffde"} Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.373903 4765 scope.go:117] "RemoveContainer" containerID="d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663" Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.396070 4765 scope.go:117] "RemoveContainer" containerID="d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663" Dec 10 07:03:02 crc kubenswrapper[4765]: E1210 07:03:02.396643 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663\": container with ID starting with d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663 not found: ID does not exist" containerID="d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663" Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.396685 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663"} err="failed to get container status \"d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663\": rpc error: code = NotFound desc = could not find container 
\"d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663\": container with ID starting with d7a7fcab465a7519eee4e644cbc012199919e7e0e6d53f3c1133531a828af663 not found: ID does not exist" Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.398529 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-q95xs"] Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.404932 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-q95xs"] Dec 10 07:03:02 crc kubenswrapper[4765]: I1210 07:03:02.596238 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79bccbed-d859-4312-933d-ab156425cb73" path="/var/lib/kubelet/pods/79bccbed-d859-4312-933d-ab156425cb73/volumes" Dec 10 07:03:07 crc kubenswrapper[4765]: I1210 07:03:07.865026 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-f8s54" Dec 10 07:03:07 crc kubenswrapper[4765]: I1210 07:03:07.866008 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-f8s54" Dec 10 07:03:07 crc kubenswrapper[4765]: I1210 07:03:07.899239 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-f8s54" Dec 10 07:03:08 crc kubenswrapper[4765]: I1210 07:03:08.436949 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-f8s54" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.117375 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57"] Dec 10 07:03:15 crc kubenswrapper[4765]: E1210 07:03:15.119208 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79bccbed-d859-4312-933d-ab156425cb73" containerName="registry-server" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.119294 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="79bccbed-d859-4312-933d-ab156425cb73" containerName="registry-server" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.119454 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="79bccbed-d859-4312-933d-ab156425cb73" containerName="registry-server" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.120344 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.122616 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-xzlh8" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.128200 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57"] Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.257675 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-util\") pod \"a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.257723 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-bundle\") pod \"a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.257884 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnc6v\" (UniqueName: \"kubernetes.io/projected/d6ec8e63-08e7-4e99-94e4-8c14becf803c-kube-api-access-pnc6v\") pod \"a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.359475 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-util\") pod \"a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.359534 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-bundle\") pod \"a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.359590 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnc6v\" (UniqueName: \"kubernetes.io/projected/d6ec8e63-08e7-4e99-94e4-8c14becf803c-kube-api-access-pnc6v\") pod \"a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.360017 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-util\") pod \"a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.360150 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-bundle\") pod \"a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.383186 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnc6v\" (UniqueName: \"kubernetes.io/projected/d6ec8e63-08e7-4e99-94e4-8c14becf803c-kube-api-access-pnc6v\") pod \"a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.469672 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:15 crc kubenswrapper[4765]: I1210 07:03:15.852885 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57"] Dec 10 07:03:16 crc kubenswrapper[4765]: I1210 07:03:16.459165 4765 generic.go:334] "Generic (PLEG): container finished" podID="d6ec8e63-08e7-4e99-94e4-8c14becf803c" containerID="11c9fcb1c9348a602215b95f713722c83e34e5eada850dd507c0f2191b0ac77f" exitCode=0 Dec 10 07:03:16 crc kubenswrapper[4765]: I1210 07:03:16.459212 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" event={"ID":"d6ec8e63-08e7-4e99-94e4-8c14becf803c","Type":"ContainerDied","Data":"11c9fcb1c9348a602215b95f713722c83e34e5eada850dd507c0f2191b0ac77f"} Dec 10 07:03:16 crc kubenswrapper[4765]: I1210 07:03:16.459264 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" event={"ID":"d6ec8e63-08e7-4e99-94e4-8c14becf803c","Type":"ContainerStarted","Data":"264c3472eb3d599b112709064f304637a5dc2eb10ea255399bc13b26f86be46b"} Dec 10 07:03:17 crc kubenswrapper[4765]: I1210 07:03:17.466937 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" event={"ID":"d6ec8e63-08e7-4e99-94e4-8c14becf803c","Type":"ContainerStarted","Data":"1691dd30989248a504c98de6351fcd9fcbccddfa781705bf4ab81c8d9d9bb1d5"} Dec 10 07:03:18 crc kubenswrapper[4765]: I1210 07:03:18.476121 4765 generic.go:334] "Generic (PLEG): container finished" podID="d6ec8e63-08e7-4e99-94e4-8c14becf803c" containerID="1691dd30989248a504c98de6351fcd9fcbccddfa781705bf4ab81c8d9d9bb1d5" exitCode=0 Dec 10 07:03:18 crc kubenswrapper[4765]: I1210 07:03:18.476234 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" 
event={"ID":"d6ec8e63-08e7-4e99-94e4-8c14becf803c","Type":"ContainerDied","Data":"1691dd30989248a504c98de6351fcd9fcbccddfa781705bf4ab81c8d9d9bb1d5"} Dec 10 07:03:19 crc kubenswrapper[4765]: I1210 07:03:19.484374 4765 generic.go:334] "Generic (PLEG): container finished" podID="d6ec8e63-08e7-4e99-94e4-8c14becf803c" containerID="cb6913c87d866a00fbaf8e8a0774b2c559bc300e1e6c559e7709ac6438f4a188" exitCode=0 Dec 10 07:03:19 crc kubenswrapper[4765]: I1210 07:03:19.484427 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" event={"ID":"d6ec8e63-08e7-4e99-94e4-8c14becf803c","Type":"ContainerDied","Data":"cb6913c87d866a00fbaf8e8a0774b2c559bc300e1e6c559e7709ac6438f4a188"} Dec 10 07:03:20 crc kubenswrapper[4765]: I1210 07:03:20.728378 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:20 crc kubenswrapper[4765]: I1210 07:03:20.827373 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-util\") pod \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " Dec 10 07:03:20 crc kubenswrapper[4765]: I1210 07:03:20.827491 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnc6v\" (UniqueName: \"kubernetes.io/projected/d6ec8e63-08e7-4e99-94e4-8c14becf803c-kube-api-access-pnc6v\") pod \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " Dec 10 07:03:20 crc kubenswrapper[4765]: I1210 07:03:20.827527 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-bundle\") pod \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\" (UID: \"d6ec8e63-08e7-4e99-94e4-8c14becf803c\") " Dec 10 07:03:20 crc kubenswrapper[4765]: I1210 07:03:20.828263 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-bundle" (OuterVolumeSpecName: "bundle") pod "d6ec8e63-08e7-4e99-94e4-8c14becf803c" (UID: "d6ec8e63-08e7-4e99-94e4-8c14becf803c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:03:20 crc kubenswrapper[4765]: I1210 07:03:20.832531 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6ec8e63-08e7-4e99-94e4-8c14becf803c-kube-api-access-pnc6v" (OuterVolumeSpecName: "kube-api-access-pnc6v") pod "d6ec8e63-08e7-4e99-94e4-8c14becf803c" (UID: "d6ec8e63-08e7-4e99-94e4-8c14becf803c"). InnerVolumeSpecName "kube-api-access-pnc6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:03:20 crc kubenswrapper[4765]: I1210 07:03:20.840888 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-util" (OuterVolumeSpecName: "util") pod "d6ec8e63-08e7-4e99-94e4-8c14becf803c" (UID: "d6ec8e63-08e7-4e99-94e4-8c14becf803c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:03:20 crc kubenswrapper[4765]: I1210 07:03:20.930578 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnc6v\" (UniqueName: \"kubernetes.io/projected/d6ec8e63-08e7-4e99-94e4-8c14becf803c-kube-api-access-pnc6v\") on node \"crc\" DevicePath \"\"" Dec 10 07:03:20 crc kubenswrapper[4765]: I1210 07:03:20.930645 4765 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:03:20 crc kubenswrapper[4765]: I1210 07:03:20.930665 4765 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d6ec8e63-08e7-4e99-94e4-8c14becf803c-util\") on node \"crc\" DevicePath \"\"" Dec 10 07:03:21 crc kubenswrapper[4765]: I1210 07:03:21.498224 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" event={"ID":"d6ec8e63-08e7-4e99-94e4-8c14becf803c","Type":"ContainerDied","Data":"264c3472eb3d599b112709064f304637a5dc2eb10ea255399bc13b26f86be46b"} Dec 10 07:03:21 crc kubenswrapper[4765]: I1210 07:03:21.498271 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="264c3472eb3d599b112709064f304637a5dc2eb10ea255399bc13b26f86be46b" Dec 10 07:03:21 crc kubenswrapper[4765]: I1210 07:03:21.498307 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.078920 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6"] Dec 10 07:03:27 crc kubenswrapper[4765]: E1210 07:03:27.079687 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6ec8e63-08e7-4e99-94e4-8c14becf803c" containerName="pull" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.079699 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6ec8e63-08e7-4e99-94e4-8c14becf803c" containerName="pull" Dec 10 07:03:27 crc kubenswrapper[4765]: E1210 07:03:27.079724 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6ec8e63-08e7-4e99-94e4-8c14becf803c" containerName="extract" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.079731 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6ec8e63-08e7-4e99-94e4-8c14becf803c" containerName="extract" Dec 10 07:03:27 crc kubenswrapper[4765]: E1210 07:03:27.079739 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6ec8e63-08e7-4e99-94e4-8c14becf803c" containerName="util" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.079745 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6ec8e63-08e7-4e99-94e4-8c14becf803c" containerName="util" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.079851 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6ec8e63-08e7-4e99-94e4-8c14becf803c" containerName="extract" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.080257 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.082200 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-6nss8" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.101797 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6"] Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.204693 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9smm\" (UniqueName: \"kubernetes.io/projected/76c02d06-2f30-4291-94eb-7009dd061b2a-kube-api-access-f9smm\") pod \"openstack-operator-controller-operator-7979d445b4-h9kx6\" (UID: \"76c02d06-2f30-4291-94eb-7009dd061b2a\") " pod="openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.305661 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9smm\" (UniqueName: \"kubernetes.io/projected/76c02d06-2f30-4291-94eb-7009dd061b2a-kube-api-access-f9smm\") pod \"openstack-operator-controller-operator-7979d445b4-h9kx6\" (UID: \"76c02d06-2f30-4291-94eb-7009dd061b2a\") " pod="openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.331036 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9smm\" (UniqueName: \"kubernetes.io/projected/76c02d06-2f30-4291-94eb-7009dd061b2a-kube-api-access-f9smm\") pod \"openstack-operator-controller-operator-7979d445b4-h9kx6\" (UID: \"76c02d06-2f30-4291-94eb-7009dd061b2a\") " pod="openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.398814 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6" Dec 10 07:03:27 crc kubenswrapper[4765]: I1210 07:03:27.943484 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6"] Dec 10 07:03:28 crc kubenswrapper[4765]: I1210 07:03:28.569755 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6" event={"ID":"76c02d06-2f30-4291-94eb-7009dd061b2a","Type":"ContainerStarted","Data":"2e025d90aed8f2f780e1aa5f6183e99a33948b45cb5450701cbc75cbf4bb0e14"} Dec 10 07:03:34 crc kubenswrapper[4765]: I1210 07:03:34.049973 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:03:34 crc kubenswrapper[4765]: I1210 07:03:34.050423 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.376047 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c76cr"] Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.377862 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.392166 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c76cr"] Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.450391 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r95dj\" (UniqueName: \"kubernetes.io/projected/f5025f75-7017-4833-bb0a-75851372f291-kube-api-access-r95dj\") pod \"certified-operators-c76cr\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") " pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.450519 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-catalog-content\") pod \"certified-operators-c76cr\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") " pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.450567 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-utilities\") pod \"certified-operators-c76cr\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") " pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.552280 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r95dj\" (UniqueName: \"kubernetes.io/projected/f5025f75-7017-4833-bb0a-75851372f291-kube-api-access-r95dj\") pod \"certified-operators-c76cr\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") " 
pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.552372 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-catalog-content\") pod \"certified-operators-c76cr\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") " pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.552397 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-utilities\") pod \"certified-operators-c76cr\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") " pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.552928 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-utilities\") pod \"certified-operators-c76cr\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") " pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.552995 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-catalog-content\") pod \"certified-operators-c76cr\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") " pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.569616 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r95dj\" (UniqueName: \"kubernetes.io/projected/f5025f75-7017-4833-bb0a-75851372f291-kube-api-access-r95dj\") pod \"certified-operators-c76cr\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") " pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.647463 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6" event={"ID":"76c02d06-2f30-4291-94eb-7009dd061b2a","Type":"ContainerStarted","Data":"d534048204104cd7e3cf6cd60425108b3449f39347899361769c79c8374b4b5d"} Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.647658 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.688156 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6" podStartSLOduration=1.582481192 podStartE2EDuration="9.688132106s" podCreationTimestamp="2025-12-10 07:03:27 +0000 UTC" firstStartedPulling="2025-12-10 07:03:27.946964472 +0000 UTC m=+927.673629788" lastFinishedPulling="2025-12-10 07:03:36.052615386 +0000 UTC m=+935.779280702" observedRunningTime="2025-12-10 07:03:36.68404878 +0000 UTC m=+936.410714096" watchObservedRunningTime="2025-12-10 07:03:36.688132106 +0000 UTC m=+936.414797422" Dec 10 07:03:36 crc kubenswrapper[4765]: I1210 07:03:36.695108 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c76cr" Dec 10 07:03:37 crc kubenswrapper[4765]: I1210 07:03:37.386886 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c76cr"] Dec 10 07:03:37 crc kubenswrapper[4765]: W1210 07:03:37.392243 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5025f75_7017_4833_bb0a_75851372f291.slice/crio-2724dd100acc9de0f0fad783a94125997efb7e3e5bdbc4c6b208db1224782431 WatchSource:0}: Error finding container 2724dd100acc9de0f0fad783a94125997efb7e3e5bdbc4c6b208db1224782431: Status 404 returned error can't find the container with id 2724dd100acc9de0f0fad783a94125997efb7e3e5bdbc4c6b208db1224782431 Dec 10 07:03:37 crc kubenswrapper[4765]: I1210 07:03:37.654637 4765 generic.go:334] "Generic (PLEG): container finished" podID="f5025f75-7017-4833-bb0a-75851372f291" containerID="691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728" exitCode=0 Dec 10 07:03:37 crc kubenswrapper[4765]: I1210 07:03:37.654773 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c76cr" event={"ID":"f5025f75-7017-4833-bb0a-75851372f291","Type":"ContainerDied","Data":"691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728"} Dec 10 07:03:37 crc kubenswrapper[4765]: I1210 07:03:37.654970 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c76cr" event={"ID":"f5025f75-7017-4833-bb0a-75851372f291","Type":"ContainerStarted","Data":"2724dd100acc9de0f0fad783a94125997efb7e3e5bdbc4c6b208db1224782431"} Dec 10 07:03:38 crc kubenswrapper[4765]: I1210 07:03:38.666683 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c76cr" event={"ID":"f5025f75-7017-4833-bb0a-75851372f291","Type":"ContainerStarted","Data":"7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46"} Dec 10 07:03:39 crc kubenswrapper[4765]: I1210 07:03:39.673690 4765 generic.go:334] "Generic (PLEG): container finished" podID="f5025f75-7017-4833-bb0a-75851372f291" containerID="7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46" exitCode=0 Dec 10 07:03:39 crc kubenswrapper[4765]: I1210 07:03:39.673783 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c76cr" event={"ID":"f5025f75-7017-4833-bb0a-75851372f291","Type":"ContainerDied","Data":"7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46"} Dec 10 07:03:41 crc kubenswrapper[4765]: I1210 07:03:41.686764 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c76cr" event={"ID":"f5025f75-7017-4833-bb0a-75851372f291","Type":"ContainerStarted","Data":"a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc"} Dec 10 07:03:41 crc kubenswrapper[4765]: I1210 07:03:41.711846 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c76cr" podStartSLOduration=2.727930561 podStartE2EDuration="5.71182771s" podCreationTimestamp="2025-12-10 07:03:36 +0000 UTC" firstStartedPulling="2025-12-10 07:03:37.655875174 +0000 UTC m=+937.382540500" lastFinishedPulling="2025-12-10 07:03:40.639772313 +0000 UTC m=+940.366437649" observedRunningTime="2025-12-10 07:03:41.707439675 +0000 UTC m=+941.434104991" watchObservedRunningTime="2025-12-10 07:03:41.71182771 +0000 UTC m=+941.438493026" Dec 10 
Dec 10 07:03:46 crc kubenswrapper[4765]: I1210 07:03:46.695766 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c76cr"
Dec 10 07:03:46 crc kubenswrapper[4765]: I1210 07:03:46.696075 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c76cr"
Dec 10 07:03:46 crc kubenswrapper[4765]: I1210 07:03:46.735634 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c76cr"
Dec 10 07:03:46 crc kubenswrapper[4765]: I1210 07:03:46.777396 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c76cr"
Dec 10 07:03:46 crc kubenswrapper[4765]: I1210 07:03:46.967703 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c76cr"]
Dec 10 07:03:47 crc kubenswrapper[4765]: I1210 07:03:47.402104 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7979d445b4-h9kx6"
Dec 10 07:03:48 crc kubenswrapper[4765]: I1210 07:03:48.730427 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c76cr" podUID="f5025f75-7017-4833-bb0a-75851372f291" containerName="registry-server" containerID="cri-o://a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc" gracePeriod=2
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.246033 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c76cr"
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.355900 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-catalog-content\") pod \"f5025f75-7017-4833-bb0a-75851372f291\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") "
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.355998 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-utilities\") pod \"f5025f75-7017-4833-bb0a-75851372f291\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") "
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.356034 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r95dj\" (UniqueName: \"kubernetes.io/projected/f5025f75-7017-4833-bb0a-75851372f291-kube-api-access-r95dj\") pod \"f5025f75-7017-4833-bb0a-75851372f291\" (UID: \"f5025f75-7017-4833-bb0a-75851372f291\") "
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.356813 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-utilities" (OuterVolumeSpecName: "utilities") pod "f5025f75-7017-4833-bb0a-75851372f291" (UID: "f5025f75-7017-4833-bb0a-75851372f291"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.362744 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5025f75-7017-4833-bb0a-75851372f291-kube-api-access-r95dj" (OuterVolumeSpecName: "kube-api-access-r95dj") pod "f5025f75-7017-4833-bb0a-75851372f291" (UID: "f5025f75-7017-4833-bb0a-75851372f291"). InnerVolumeSpecName "kube-api-access-r95dj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.405071 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f5025f75-7017-4833-bb0a-75851372f291" (UID: "f5025f75-7017-4833-bb0a-75851372f291"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.457854 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.457888 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r95dj\" (UniqueName: \"kubernetes.io/projected/f5025f75-7017-4833-bb0a-75851372f291-kube-api-access-r95dj\") on node \"crc\" DevicePath \"\""
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.457898 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5025f75-7017-4833-bb0a-75851372f291-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.743807 4765 generic.go:334] "Generic (PLEG): container finished" podID="f5025f75-7017-4833-bb0a-75851372f291" containerID="a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc" exitCode=0
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.743846 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c76cr" event={"ID":"f5025f75-7017-4833-bb0a-75851372f291","Type":"ContainerDied","Data":"a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc"}
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.743856 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c76cr"
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.743871 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c76cr" event={"ID":"f5025f75-7017-4833-bb0a-75851372f291","Type":"ContainerDied","Data":"2724dd100acc9de0f0fad783a94125997efb7e3e5bdbc4c6b208db1224782431"}
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.743889 4765 scope.go:117] "RemoveContainer" containerID="a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc"
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.763322 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c76cr"]
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.765766 4765 scope.go:117] "RemoveContainer" containerID="7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46"
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.768915 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c76cr"]
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.795285 4765 scope.go:117] "RemoveContainer" containerID="691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728"
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.809272 4765 scope.go:117] "RemoveContainer" containerID="a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc"
Dec 10 07:03:50 crc kubenswrapper[4765]: E1210 07:03:50.809673 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc\": container with ID starting with a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc not found: ID does not exist" containerID="a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc"
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.809732 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc"} err="failed to get container status \"a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc\": rpc error: code = NotFound desc = could not find container \"a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc\": container with ID starting with a33f019e25080f7698b1f2330377a8c1f6d071546e07db76d0db241c718989bc not found: ID does not exist"
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.809752 4765 scope.go:117] "RemoveContainer" containerID="7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46"
Dec 10 07:03:50 crc kubenswrapper[4765]: E1210 07:03:50.810049 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46\": container with ID starting with 7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46 not found: ID does not exist" containerID="7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46"
Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.810069 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46"} err="failed to get container status \"7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46\": rpc error: code = NotFound desc = could not find
container \"7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46\": container with ID starting with 7f9ee38800fd072c0610eca7ccb40ef0944c111cfaff796aec1994efd9d83c46 not found: ID does not exist" Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.810098 4765 scope.go:117] "RemoveContainer" containerID="691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728" Dec 10 07:03:50 crc kubenswrapper[4765]: E1210 07:03:50.810316 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728\": container with ID starting with 691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728 not found: ID does not exist" containerID="691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728" Dec 10 07:03:50 crc kubenswrapper[4765]: I1210 07:03:50.810355 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728"} err="failed to get container status \"691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728\": rpc error: code = NotFound desc = could not find container \"691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728\": container with ID starting with 691b27ab89e0943c740635f7438b901e50a5071df3adec3a8742ae839b471728 not found: ID does not exist" Dec 10 07:03:52 crc kubenswrapper[4765]: I1210 07:03:52.597475 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5025f75-7017-4833-bb0a-75851372f291" path="/var/lib/kubelet/pods/f5025f75-7017-4833-bb0a-75851372f291/volumes" Dec 10 07:04:04 crc kubenswrapper[4765]: I1210 07:04:04.049333 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:04:04 crc kubenswrapper[4765]: I1210 07:04:04.049880 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.423948 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vgspd"] Dec 10 07:04:06 crc kubenswrapper[4765]: E1210 07:04:06.424575 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5025f75-7017-4833-bb0a-75851372f291" containerName="extract-utilities" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.424593 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5025f75-7017-4833-bb0a-75851372f291" containerName="extract-utilities" Dec 10 07:04:06 crc kubenswrapper[4765]: E1210 07:04:06.424617 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5025f75-7017-4833-bb0a-75851372f291" containerName="extract-content" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.424625 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5025f75-7017-4833-bb0a-75851372f291" containerName="extract-content" Dec 10 07:04:06 crc kubenswrapper[4765]: E1210 07:04:06.424650 4765 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f5025f75-7017-4833-bb0a-75851372f291" containerName="registry-server" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.424657 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5025f75-7017-4833-bb0a-75851372f291" containerName="registry-server" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.424787 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5025f75-7017-4833-bb0a-75851372f291" containerName="registry-server" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.425839 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.448167 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vgspd"] Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.514200 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-catalog-content\") pod \"community-operators-vgspd\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.514397 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5zjv\" (UniqueName: \"kubernetes.io/projected/3d5b1a2a-4382-4aa4-9a12-676b126e421b-kube-api-access-f5zjv\") pod \"community-operators-vgspd\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.514492 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-utilities\") pod \"community-operators-vgspd\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.687162 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-catalog-content\") pod \"community-operators-vgspd\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.687320 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5zjv\" (UniqueName: \"kubernetes.io/projected/3d5b1a2a-4382-4aa4-9a12-676b126e421b-kube-api-access-f5zjv\") pod \"community-operators-vgspd\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.687366 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-utilities\") pod \"community-operators-vgspd\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.687916 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-utilities\") pod \"community-operators-vgspd\" 
(UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.687969 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-catalog-content\") pod \"community-operators-vgspd\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.718593 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5zjv\" (UniqueName: \"kubernetes.io/projected/3d5b1a2a-4382-4aa4-9a12-676b126e421b-kube-api-access-f5zjv\") pod \"community-operators-vgspd\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:06 crc kubenswrapper[4765]: I1210 07:04:06.748295 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:07 crc kubenswrapper[4765]: I1210 07:04:07.304247 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vgspd"] Dec 10 07:04:08 crc kubenswrapper[4765]: I1210 07:04:08.177735 4765 generic.go:334] "Generic (PLEG): container finished" podID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerID="7a8fd38c1b75c2926625cfeba451de9ebd1d74ac0ab1dd4bcc30057f946ae9f2" exitCode=0 Dec 10 07:04:08 crc kubenswrapper[4765]: I1210 07:04:08.177825 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgspd" event={"ID":"3d5b1a2a-4382-4aa4-9a12-676b126e421b","Type":"ContainerDied","Data":"7a8fd38c1b75c2926625cfeba451de9ebd1d74ac0ab1dd4bcc30057f946ae9f2"} Dec 10 07:04:08 crc kubenswrapper[4765]: I1210 07:04:08.178265 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgspd" event={"ID":"3d5b1a2a-4382-4aa4-9a12-676b126e421b","Type":"ContainerStarted","Data":"548243920d61b7f60810a4c5c3cd36c332dd14928d5b8f80c708ebb7d8a59b39"} Dec 10 07:04:10 crc kubenswrapper[4765]: I1210 07:04:10.210958 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgspd" event={"ID":"3d5b1a2a-4382-4aa4-9a12-676b126e421b","Type":"ContainerStarted","Data":"789fd1afea22152608957718e26e26b002d1ee82226c3445dd48e3da7a2d5fc2"} Dec 10 07:04:11 crc kubenswrapper[4765]: I1210 07:04:11.420878 4765 generic.go:334] "Generic (PLEG): container finished" podID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerID="789fd1afea22152608957718e26e26b002d1ee82226c3445dd48e3da7a2d5fc2" exitCode=0 Dec 10 07:04:11 crc kubenswrapper[4765]: I1210 07:04:11.421254 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgspd" event={"ID":"3d5b1a2a-4382-4aa4-9a12-676b126e421b","Type":"ContainerDied","Data":"789fd1afea22152608957718e26e26b002d1ee82226c3445dd48e3da7a2d5fc2"} Dec 10 07:04:12 crc kubenswrapper[4765]: I1210 07:04:12.439349 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgspd" event={"ID":"3d5b1a2a-4382-4aa4-9a12-676b126e421b","Type":"ContainerStarted","Data":"cc9e0ce148c1061ddb8530580be4223fa4e477660fe7f5db8b1f868d98d0db49"} Dec 10 07:04:12 crc kubenswrapper[4765]: I1210 07:04:12.477293 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-vgspd" podStartSLOduration=2.705073379 podStartE2EDuration="6.477272328s" podCreationTimestamp="2025-12-10 07:04:06 +0000 UTC" firstStartedPulling="2025-12-10 07:04:08.179275742 +0000 UTC m=+967.905941058" lastFinishedPulling="2025-12-10 07:04:11.951474691 +0000 UTC m=+971.678140007" observedRunningTime="2025-12-10 07:04:12.470571647 +0000 UTC m=+972.197236973" watchObservedRunningTime="2025-12-10 07:04:12.477272328 +0000 UTC m=+972.203937634" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.443277 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.444538 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.446440 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-h2dpn" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.447899 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.448917 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.450816 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-zbdhv" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.461122 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.473053 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.475017 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.476989 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.478290 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-9bjtx" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.486322 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.487870 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.493989 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-pkckd" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.504958 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.539079 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.607319 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.608379 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.608505 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.614467 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-9vz66" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.633819 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gmdl\" (UniqueName: \"kubernetes.io/projected/98ff262d-fdd9-4e2e-9cd9-4f570716bf02-kube-api-access-6gmdl\") pod \"designate-operator-controller-manager-697fb699cf-zd5jq\" (UID: \"98ff262d-fdd9-4e2e-9cd9-4f570716bf02\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.634204 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zc7q2\" (UniqueName: \"kubernetes.io/projected/1ccf5120-6d6b-49e6-ae2d-08464b3ab398-kube-api-access-zc7q2\") pod \"cinder-operator-controller-manager-6c677c69b-kgvfr\" (UID: \"1ccf5120-6d6b-49e6-ae2d-08464b3ab398\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.634339 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtck5\" (UniqueName: \"kubernetes.io/projected/87a90f47-7e73-45ff-9f98-93bec3ebe12e-kube-api-access-mtck5\") pod \"glance-operator-controller-manager-5697bb5779-9jg4f\" (UID: \"87a90f47-7e73-45ff-9f98-93bec3ebe12e\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.634456 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nftz9\" (UniqueName: \"kubernetes.io/projected/f883d81e-6c50-4d92-878d-253a954fcd7a-kube-api-access-nftz9\") pod \"barbican-operator-controller-manager-7d9dfd778-mjmkd\" (UID: \"f883d81e-6c50-4d92-878d-253a954fcd7a\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.634628 4765 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.635969 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.640054 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-6s5fx" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.643342 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.694095 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.695211 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.701216 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.706907 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.707107 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-64wn8" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.716122 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.722717 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.725570 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-6w6dj" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.735320 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5b65\" (UniqueName: \"kubernetes.io/projected/85f78838-e7c3-4c58-8be9-5cb847b81d6d-kube-api-access-r5b65\") pod \"horizon-operator-controller-manager-68c6d99b8f-sfbj7\" (UID: \"85f78838-e7c3-4c58-8be9-5cb847b81d6d\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.735374 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gmdl\" (UniqueName: \"kubernetes.io/projected/98ff262d-fdd9-4e2e-9cd9-4f570716bf02-kube-api-access-6gmdl\") pod \"designate-operator-controller-manager-697fb699cf-zd5jq\" (UID: \"98ff262d-fdd9-4e2e-9cd9-4f570716bf02\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.735432 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gg25q\" (UniqueName: \"kubernetes.io/projected/eb079565-c2aa-4756-b335-df9a6dac3758-kube-api-access-gg25q\") pod \"heat-operator-controller-manager-5f64f6f8bb-hbkmm\" (UID: \"eb079565-c2aa-4756-b335-df9a6dac3758\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.735465 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zc7q2\" (UniqueName: \"kubernetes.io/projected/1ccf5120-6d6b-49e6-ae2d-08464b3ab398-kube-api-access-zc7q2\") pod \"cinder-operator-controller-manager-6c677c69b-kgvfr\" (UID: \"1ccf5120-6d6b-49e6-ae2d-08464b3ab398\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.735495 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtck5\" (UniqueName: \"kubernetes.io/projected/87a90f47-7e73-45ff-9f98-93bec3ebe12e-kube-api-access-mtck5\") pod \"glance-operator-controller-manager-5697bb5779-9jg4f\" (UID: \"87a90f47-7e73-45ff-9f98-93bec3ebe12e\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.735528 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nftz9\" (UniqueName: \"kubernetes.io/projected/f883d81e-6c50-4d92-878d-253a954fcd7a-kube-api-access-nftz9\") pod \"barbican-operator-controller-manager-7d9dfd778-mjmkd\" (UID: \"f883d81e-6c50-4d92-878d-253a954fcd7a\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.741920 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.748937 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.749837 
4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.765233 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtck5\" (UniqueName: \"kubernetes.io/projected/87a90f47-7e73-45ff-9f98-93bec3ebe12e-kube-api-access-mtck5\") pod \"glance-operator-controller-manager-5697bb5779-9jg4f\" (UID: \"87a90f47-7e73-45ff-9f98-93bec3ebe12e\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.769887 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.770935 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.773823 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nftz9\" (UniqueName: \"kubernetes.io/projected/f883d81e-6c50-4d92-878d-253a954fcd7a-kube-api-access-nftz9\") pod \"barbican-operator-controller-manager-7d9dfd778-mjmkd\" (UID: \"f883d81e-6c50-4d92-878d-253a954fcd7a\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.774176 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gmdl\" (UniqueName: \"kubernetes.io/projected/98ff262d-fdd9-4e2e-9cd9-4f570716bf02-kube-api-access-6gmdl\") pod \"designate-operator-controller-manager-697fb699cf-zd5jq\" (UID: \"98ff262d-fdd9-4e2e-9cd9-4f570716bf02\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.774899 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-pggqx" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.786672 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zc7q2\" (UniqueName: \"kubernetes.io/projected/1ccf5120-6d6b-49e6-ae2d-08464b3ab398-kube-api-access-zc7q2\") pod \"cinder-operator-controller-manager-6c677c69b-kgvfr\" (UID: \"1ccf5120-6d6b-49e6-ae2d-08464b3ab398\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.786801 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.787796 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.801144 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.808425 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.812829 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-vww2b" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.821426 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.822479 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.826399 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-cw9gt" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.827454 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.828579 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.836391 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-7pfvq" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.836580 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.843433 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zdnj\" (UniqueName: \"kubernetes.io/projected/06b9f74b-2726-43cb-9353-dec4e4a34f01-kube-api-access-6zdnj\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.843487 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gg25q\" (UniqueName: \"kubernetes.io/projected/eb079565-c2aa-4756-b335-df9a6dac3758-kube-api-access-gg25q\") pod \"heat-operator-controller-manager-5f64f6f8bb-hbkmm\" (UID: \"eb079565-c2aa-4756-b335-df9a6dac3758\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.843523 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.843604 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5b65\" (UniqueName: \"kubernetes.io/projected/85f78838-e7c3-4c58-8be9-5cb847b81d6d-kube-api-access-r5b65\") pod \"horizon-operator-controller-manager-68c6d99b8f-sfbj7\" (UID: \"85f78838-e7c3-4c58-8be9-5cb847b81d6d\") " 
pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.843628 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26smp\" (UniqueName: \"kubernetes.io/projected/f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720-kube-api-access-26smp\") pod \"ironic-operator-controller-manager-967d97867-xqrjv\" (UID: \"f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.844431 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.848462 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.859061 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.871866 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gg25q\" (UniqueName: \"kubernetes.io/projected/eb079565-c2aa-4756-b335-df9a6dac3758-kube-api-access-gg25q\") pod \"heat-operator-controller-manager-5f64f6f8bb-hbkmm\" (UID: \"eb079565-c2aa-4756-b335-df9a6dac3758\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.878821 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-767vs"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.879885 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.885154 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-767vs"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.886811 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-8lxdq" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.897103 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5b65\" (UniqueName: \"kubernetes.io/projected/85f78838-e7c3-4c58-8be9-5cb847b81d6d-kube-api-access-r5b65\") pod \"horizon-operator-controller-manager-68c6d99b8f-sfbj7\" (UID: \"85f78838-e7c3-4c58-8be9-5cb847b81d6d\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.919572 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.920880 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.936729 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.938516 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-lshtb" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.941890 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.946815 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxdx5\" (UniqueName: \"kubernetes.io/projected/f9c242b2-1388-431e-8c76-1022426252c7-kube-api-access-xxdx5\") pod \"keystone-operator-controller-manager-7765d96ddf-25qhw\" (UID: \"f9c242b2-1388-431e-8c76-1022426252c7\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.946866 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znxgb\" (UniqueName: \"kubernetes.io/projected/44bbc33f-2848-42f4-b8e9-d99ba69ea07b-kube-api-access-znxgb\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-wrjkf\" (UID: \"44bbc33f-2848-42f4-b8e9-d99ba69ea07b\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.946936 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zdnj\" (UniqueName: \"kubernetes.io/projected/06b9f74b-2726-43cb-9353-dec4e4a34f01-kube-api-access-6zdnj\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.946979 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.947007 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcl5d\" (UniqueName: \"kubernetes.io/projected/9b065504-7b79-4bb0-b583-a37953820f14-kube-api-access-tcl5d\") pod \"mariadb-operator-controller-manager-79c8c4686c-mtl8t\" (UID: \"9b065504-7b79-4bb0-b583-a37953820f14\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.947049 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7z4v\" (UniqueName: \"kubernetes.io/projected/8d2a8965-e959-4d48-bc75-e91d2c235898-kube-api-access-k7z4v\") pod \"manila-operator-controller-manager-5b5fd79c9c-4j6rt\" (UID: \"8d2a8965-e959-4d48-bc75-e91d2c235898\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.947106 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26smp\" (UniqueName: 
\"kubernetes.io/projected/f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720-kube-api-access-26smp\") pod \"ironic-operator-controller-manager-967d97867-xqrjv\" (UID: \"f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" Dec 10 07:04:16 crc kubenswrapper[4765]: E1210 07:04:16.947742 4765 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 07:04:16 crc kubenswrapper[4765]: E1210 07:04:16.947806 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert podName:06b9f74b-2726-43cb-9353-dec4e4a34f01 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:17.447784055 +0000 UTC m=+977.174449371 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert") pod "infra-operator-controller-manager-78d48bff9d-7f8r9" (UID: "06b9f74b-2726-43cb-9353-dec4e4a34f01") : secret "infra-operator-webhook-server-cert" not found Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.957307 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.966204 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.971248 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zdnj\" (UniqueName: \"kubernetes.io/projected/06b9f74b-2726-43cb-9353-dec4e4a34f01-kube-api-access-6zdnj\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.972519 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26smp\" (UniqueName: \"kubernetes.io/projected/f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720-kube-api-access-26smp\") pod \"ironic-operator-controller-manager-967d97867-xqrjv\" (UID: \"f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.977695 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6"] Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.979006 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.982783 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-slsgr" Dec 10 07:04:16 crc kubenswrapper[4765]: I1210 07:04:16.989053 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.005735 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.008541 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-d9vn6" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.008733 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.010911 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.020093 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.030297 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-p286q"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.085139 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.088019 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.232285 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.273367 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-p286q"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.273555 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.276410 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-dzstl" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.277027 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxdx5\" (UniqueName: \"kubernetes.io/projected/f9c242b2-1388-431e-8c76-1022426252c7-kube-api-access-xxdx5\") pod \"keystone-operator-controller-manager-7765d96ddf-25qhw\" (UID: \"f9c242b2-1388-431e-8c76-1022426252c7\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.277175 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znxgb\" (UniqueName: \"kubernetes.io/projected/44bbc33f-2848-42f4-b8e9-d99ba69ea07b-kube-api-access-znxgb\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-wrjkf\" (UID: \"44bbc33f-2848-42f4-b8e9-d99ba69ea07b\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.277477 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8jlv\" (UniqueName: \"kubernetes.io/projected/23fa0793-f896-4059-b33e-fe00ea97dbab-kube-api-access-w8jlv\") pod \"octavia-operator-controller-manager-998648c74-tdvd4\" (UID: \"23fa0793-f896-4059-b33e-fe00ea97dbab\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.277573 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9zfz\" (UniqueName: \"kubernetes.io/projected/06116776-17f7-42fd-a55c-8965a8932070-kube-api-access-j9zfz\") pod \"nova-operator-controller-manager-697bc559fc-767vs\" (UID: \"06116776-17f7-42fd-a55c-8965a8932070\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.277901 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcl5d\" (UniqueName: \"kubernetes.io/projected/9b065504-7b79-4bb0-b583-a37953820f14-kube-api-access-tcl5d\") pod \"mariadb-operator-controller-manager-79c8c4686c-mtl8t\" (UID: \"9b065504-7b79-4bb0-b583-a37953820f14\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.278286 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7z4v\" (UniqueName: \"kubernetes.io/projected/8d2a8965-e959-4d48-bc75-e91d2c235898-kube-api-access-k7z4v\") pod \"manila-operator-controller-manager-5b5fd79c9c-4j6rt\" (UID: \"8d2a8965-e959-4d48-bc75-e91d2c235898\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.361687 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxdx5\" (UniqueName: \"kubernetes.io/projected/f9c242b2-1388-431e-8c76-1022426252c7-kube-api-access-xxdx5\") pod \"keystone-operator-controller-manager-7765d96ddf-25qhw\" (UID: \"f9c242b2-1388-431e-8c76-1022426252c7\") " 
pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.370478 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.371600 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znxgb\" (UniqueName: \"kubernetes.io/projected/44bbc33f-2848-42f4-b8e9-d99ba69ea07b-kube-api-access-znxgb\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-wrjkf\" (UID: \"44bbc33f-2848-42f4-b8e9-d99ba69ea07b\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.372235 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.385631 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-f4f7c" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.394930 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.404839 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.406235 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.412134 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-9d4zv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.419530 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.420208 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4q2d\" (UniqueName: \"kubernetes.io/projected/32ede12b-66f5-42e9-8e6b-77a6e45c3099-kube-api-access-w4q2d\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.420239 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.420274 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8jlv\" (UniqueName: \"kubernetes.io/projected/23fa0793-f896-4059-b33e-fe00ea97dbab-kube-api-access-w8jlv\") pod \"octavia-operator-controller-manager-998648c74-tdvd4\" (UID: \"23fa0793-f896-4059-b33e-fe00ea97dbab\") " 
pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.420291 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9zfz\" (UniqueName: \"kubernetes.io/projected/06116776-17f7-42fd-a55c-8965a8932070-kube-api-access-j9zfz\") pod \"nova-operator-controller-manager-697bc559fc-767vs\" (UID: \"06116776-17f7-42fd-a55c-8965a8932070\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.420350 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r9gl\" (UniqueName: \"kubernetes.io/projected/eb907950-a4b3-4ba5-bea0-3075610995af-kube-api-access-2r9gl\") pod \"ovn-operator-controller-manager-b6456fdb6-g5tv6\" (UID: \"eb907950-a4b3-4ba5-bea0-3075610995af\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.427821 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcl5d\" (UniqueName: \"kubernetes.io/projected/9b065504-7b79-4bb0-b583-a37953820f14-kube-api-access-tcl5d\") pod \"mariadb-operator-controller-manager-79c8c4686c-mtl8t\" (UID: \"9b065504-7b79-4bb0-b583-a37953820f14\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.428578 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-w92jp"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.429722 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7z4v\" (UniqueName: \"kubernetes.io/projected/8d2a8965-e959-4d48-bc75-e91d2c235898-kube-api-access-k7z4v\") pod \"manila-operator-controller-manager-5b5fd79c9c-4j6rt\" (UID: \"8d2a8965-e959-4d48-bc75-e91d2c235898\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.429784 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.434766 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-dnqh7" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.443053 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-w92jp"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.455818 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9zfz\" (UniqueName: \"kubernetes.io/projected/06116776-17f7-42fd-a55c-8965a8932070-kube-api-access-j9zfz\") pod \"nova-operator-controller-manager-697bc559fc-767vs\" (UID: \"06116776-17f7-42fd-a55c-8965a8932070\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.465810 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.467448 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.482346 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8jlv\" (UniqueName: \"kubernetes.io/projected/23fa0793-f896-4059-b33e-fe00ea97dbab-kube-api-access-w8jlv\") pod \"octavia-operator-controller-manager-998648c74-tdvd4\" (UID: \"23fa0793-f896-4059-b33e-fe00ea97dbab\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.496462 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.505364 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-xjrgv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.512743 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-668858c49-rtprx"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.514363 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.520234 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-8gpsw" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.520444 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.520548 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.523062 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-668858c49-rtprx"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.524637 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4q2d\" (UniqueName: \"kubernetes.io/projected/32ede12b-66f5-42e9-8e6b-77a6e45c3099-kube-api-access-w4q2d\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.524672 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.524735 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.524767 4765 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmnwz\" (UniqueName: \"kubernetes.io/projected/68b73b9b-6bcb-4d42-a879-36107b59e8a8-kube-api-access-hmnwz\") pod \"test-operator-controller-manager-5854674fcc-w92jp\" (UID: \"68b73b9b-6bcb-4d42-a879-36107b59e8a8\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.524822 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z66pt\" (UniqueName: \"kubernetes.io/projected/9a932c52-665f-4162-8f00-afdc61891dc5-kube-api-access-z66pt\") pod \"telemetry-operator-controller-manager-58d5ff84df-kt2js\" (UID: \"9a932c52-665f-4162-8f00-afdc61891dc5\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.524857 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r9gl\" (UniqueName: \"kubernetes.io/projected/eb907950-a4b3-4ba5-bea0-3075610995af-kube-api-access-2r9gl\") pod \"ovn-operator-controller-manager-b6456fdb6-g5tv6\" (UID: \"eb907950-a4b3-4ba5-bea0-3075610995af\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.524896 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfhpf\" (UniqueName: \"kubernetes.io/projected/dbe2661b-7d79-49f0-9d56-5b66f440670f-kube-api-access-jfhpf\") pod \"swift-operator-controller-manager-9d58d64bc-pwpkv\" (UID: \"dbe2661b-7d79-49f0-9d56-5b66f440670f\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.524940 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmsfr\" (UniqueName: \"kubernetes.io/projected/ccb3c4ba-9eab-4b59-85a4-e672a4310cf5-kube-api-access-qmsfr\") pod \"placement-operator-controller-manager-78f8948974-p286q\" (UID: \"ccb3c4ba-9eab-4b59-85a4-e672a4310cf5\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" Dec 10 07:04:17 crc kubenswrapper[4765]: E1210 07:04:17.525551 4765 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:17 crc kubenswrapper[4765]: E1210 07:04:17.525602 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert podName:32ede12b-66f5-42e9-8e6b-77a6e45c3099 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:18.025583552 +0000 UTC m=+977.752248868 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert") pod "openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" (UID: "32ede12b-66f5-42e9-8e6b-77a6e45c3099") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:17 crc kubenswrapper[4765]: E1210 07:04:17.525660 4765 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 07:04:17 crc kubenswrapper[4765]: E1210 07:04:17.525692 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert podName:06b9f74b-2726-43cb-9353-dec4e4a34f01 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:18.525679795 +0000 UTC m=+978.252345111 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert") pod "infra-operator-controller-manager-78d48bff9d-7f8r9" (UID: "06b9f74b-2726-43cb-9353-dec4e4a34f01") : secret "infra-operator-webhook-server-cert" not found Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.540031 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.547742 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.548935 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.550868 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-zzkt6" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.554062 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.554960 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r9gl\" (UniqueName: \"kubernetes.io/projected/eb907950-a4b3-4ba5-bea0-3075610995af-kube-api-access-2r9gl\") pod \"ovn-operator-controller-manager-b6456fdb6-g5tv6\" (UID: \"eb907950-a4b3-4ba5-bea0-3075610995af\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.606231 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.607135 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.625931 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2kr2\" (UniqueName: \"kubernetes.io/projected/b4131e33-88a7-4b19-8ffc-4029eec86cd3-kube-api-access-b2kr2\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.626033 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmnwz\" (UniqueName: \"kubernetes.io/projected/68b73b9b-6bcb-4d42-a879-36107b59e8a8-kube-api-access-hmnwz\") pod \"test-operator-controller-manager-5854674fcc-w92jp\" (UID: \"68b73b9b-6bcb-4d42-a879-36107b59e8a8\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.626073 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z66pt\" (UniqueName: \"kubernetes.io/projected/9a932c52-665f-4162-8f00-afdc61891dc5-kube-api-access-z66pt\") pod \"telemetry-operator-controller-manager-58d5ff84df-kt2js\" (UID: \"9a932c52-665f-4162-8f00-afdc61891dc5\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.629938 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4q2d\" (UniqueName: \"kubernetes.io/projected/32ede12b-66f5-42e9-8e6b-77a6e45c3099-kube-api-access-w4q2d\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.635333 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfhpf\" (UniqueName: \"kubernetes.io/projected/dbe2661b-7d79-49f0-9d56-5b66f440670f-kube-api-access-jfhpf\") pod \"swift-operator-controller-manager-9d58d64bc-pwpkv\" (UID: \"dbe2661b-7d79-49f0-9d56-5b66f440670f\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.635415 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.635463 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.635505 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmsfr\" (UniqueName: 
\"kubernetes.io/projected/ccb3c4ba-9eab-4b59-85a4-e672a4310cf5-kube-api-access-qmsfr\") pod \"placement-operator-controller-manager-78f8948974-p286q\" (UID: \"ccb3c4ba-9eab-4b59-85a4-e672a4310cf5\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.635531 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drlkm\" (UniqueName: \"kubernetes.io/projected/b9667d31-f2e0-4172-bdc6-c35854e9f81a-kube-api-access-drlkm\") pod \"watcher-operator-controller-manager-667bd8d554-dxgfv\" (UID: \"b9667d31-f2e0-4172-bdc6-c35854e9f81a\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.636291 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.650947 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmnwz\" (UniqueName: \"kubernetes.io/projected/68b73b9b-6bcb-4d42-a879-36107b59e8a8-kube-api-access-hmnwz\") pod \"test-operator-controller-manager-5854674fcc-w92jp\" (UID: \"68b73b9b-6bcb-4d42-a879-36107b59e8a8\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.667680 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z66pt\" (UniqueName: \"kubernetes.io/projected/9a932c52-665f-4162-8f00-afdc61891dc5-kube-api-access-z66pt\") pod \"telemetry-operator-controller-manager-58d5ff84df-kt2js\" (UID: \"9a932c52-665f-4162-8f00-afdc61891dc5\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.667982 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.671750 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.672387 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmsfr\" (UniqueName: \"kubernetes.io/projected/ccb3c4ba-9eab-4b59-85a4-e672a4310cf5-kube-api-access-qmsfr\") pod \"placement-operator-controller-manager-78f8948974-p286q\" (UID: \"ccb3c4ba-9eab-4b59-85a4-e672a4310cf5\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.676226 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfhpf\" (UniqueName: \"kubernetes.io/projected/dbe2661b-7d79-49f0-9d56-5b66f440670f-kube-api-access-jfhpf\") pod \"swift-operator-controller-manager-9d58d64bc-pwpkv\" (UID: \"dbe2661b-7d79-49f0-9d56-5b66f440670f\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.685179 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.704022 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.712524 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.721185 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.731838 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.792323 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.793277 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whphn\" (UniqueName: \"kubernetes.io/projected/29a0de10-d351-4d7c-9dfa-38e628ce116d-kube-api-access-whphn\") pod \"rabbitmq-cluster-operator-manager-668c99d594-rg5bf\" (UID: \"29a0de10-d351-4d7c-9dfa-38e628ce116d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.793562 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.793601 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.793651 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drlkm\" (UniqueName: \"kubernetes.io/projected/b9667d31-f2e0-4172-bdc6-c35854e9f81a-kube-api-access-drlkm\") pod \"watcher-operator-controller-manager-667bd8d554-dxgfv\" (UID: \"b9667d31-f2e0-4172-bdc6-c35854e9f81a\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.793725 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2kr2\" (UniqueName: \"kubernetes.io/projected/b4131e33-88a7-4b19-8ffc-4029eec86cd3-kube-api-access-b2kr2\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:17 crc kubenswrapper[4765]: E1210 07:04:17.795374 4765 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 07:04:17 crc kubenswrapper[4765]: E1210 07:04:17.795417 4765 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:18.295402883 +0000 UTC m=+978.022068199 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "metrics-server-cert" not found Dec 10 07:04:17 crc kubenswrapper[4765]: E1210 07:04:17.795475 4765 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 07:04:17 crc kubenswrapper[4765]: E1210 07:04:17.795500 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:18.295492825 +0000 UTC m=+978.022158141 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "webhook-server-cert" not found Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.813333 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vgspd"] Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.830345 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2kr2\" (UniqueName: \"kubernetes.io/projected/b4131e33-88a7-4b19-8ffc-4029eec86cd3-kube-api-access-b2kr2\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.897591 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whphn\" (UniqueName: \"kubernetes.io/projected/29a0de10-d351-4d7c-9dfa-38e628ce116d-kube-api-access-whphn\") pod \"rabbitmq-cluster-operator-manager-668c99d594-rg5bf\" (UID: \"29a0de10-d351-4d7c-9dfa-38e628ce116d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" Dec 10 07:04:17 crc kubenswrapper[4765]: I1210 07:04:17.951809 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.279894 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:18 crc kubenswrapper[4765]: E1210 07:04:18.280721 4765 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:18 crc kubenswrapper[4765]: E1210 07:04:18.280828 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert podName:32ede12b-66f5-42e9-8e6b-77a6e45c3099 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:19.280809219 +0000 UTC m=+979.007474535 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert") pod "openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" (UID: "32ede12b-66f5-42e9-8e6b-77a6e45c3099") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.312076 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drlkm\" (UniqueName: \"kubernetes.io/projected/b9667d31-f2e0-4172-bdc6-c35854e9f81a-kube-api-access-drlkm\") pod \"watcher-operator-controller-manager-667bd8d554-dxgfv\" (UID: \"b9667d31-f2e0-4172-bdc6-c35854e9f81a\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.376794 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.383517 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.383725 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:18 crc kubenswrapper[4765]: E1210 07:04:18.385432 4765 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 07:04:18 crc kubenswrapper[4765]: E1210 07:04:18.385491 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. 
No retries permitted until 2025-12-10 07:04:19.385474279 +0000 UTC m=+979.112139595 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "webhook-server-cert" not found Dec 10 07:04:18 crc kubenswrapper[4765]: E1210 07:04:18.385533 4765 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 07:04:18 crc kubenswrapper[4765]: E1210 07:04:18.385553 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:19.385545761 +0000 UTC m=+979.112211077 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "metrics-server-cert" not found Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.425920 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whphn\" (UniqueName: \"kubernetes.io/projected/29a0de10-d351-4d7c-9dfa-38e628ce116d-kube-api-access-whphn\") pod \"rabbitmq-cluster-operator-manager-668c99d594-rg5bf\" (UID: \"29a0de10-d351-4d7c-9dfa-38e628ce116d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.475825 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f"] Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.554784 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.587838 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:18 crc kubenswrapper[4765]: E1210 07:04:18.588032 4765 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 07:04:18 crc kubenswrapper[4765]: E1210 07:04:18.588079 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert podName:06b9f74b-2726-43cb-9353-dec4e4a34f01 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:20.588063666 +0000 UTC m=+980.314728982 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert") pod "infra-operator-controller-manager-78d48bff9d-7f8r9" (UID: "06b9f74b-2726-43cb-9353-dec4e4a34f01") : secret "infra-operator-webhook-server-cert" not found Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.621844 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" event={"ID":"98ff262d-fdd9-4e2e-9cd9-4f570716bf02","Type":"ContainerStarted","Data":"2b7e097c426cecf460845bc50bb3cc528b7c189066e995dc95350bb7e548e1d8"} Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.806631 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr"] Dec 10 07:04:18 crc kubenswrapper[4765]: I1210 07:04:18.970413 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv"] Dec 10 07:04:19 crc kubenswrapper[4765]: W1210 07:04:19.082464 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8c7e1f9_bf7b_4b50_9ee2_b2e73735e720.slice/crio-cf8a086b3f627958b60e60b27121c1c837cde538780b310a3c7596ea96abf7fa WatchSource:0}: Error finding container cf8a086b3f627958b60e60b27121c1c837cde538780b310a3c7596ea96abf7fa: Status 404 returned error can't find the container with id cf8a086b3f627958b60e60b27121c1c837cde538780b310a3c7596ea96abf7fa Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.298183 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:19 crc kubenswrapper[4765]: E1210 07:04:19.298365 4765 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:19 crc kubenswrapper[4765]: E1210 07:04:19.298448 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert podName:32ede12b-66f5-42e9-8e6b-77a6e45c3099 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:21.298430327 +0000 UTC m=+981.025095643 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert") pod "openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" (UID: "32ede12b-66f5-42e9-8e6b-77a6e45c3099") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.399823 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.399910 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:19 crc kubenswrapper[4765]: E1210 07:04:19.400065 4765 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 07:04:19 crc kubenswrapper[4765]: E1210 07:04:19.400189 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:21.400161613 +0000 UTC m=+981.126827119 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "webhook-server-cert" not found Dec 10 07:04:19 crc kubenswrapper[4765]: E1210 07:04:19.400298 4765 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 07:04:19 crc kubenswrapper[4765]: E1210 07:04:19.400398 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:21.400370739 +0000 UTC m=+981.127036255 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "metrics-server-cert" not found Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.637306 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" event={"ID":"f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720","Type":"ContainerStarted","Data":"cf8a086b3f627958b60e60b27121c1c837cde538780b310a3c7596ea96abf7fa"} Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.639124 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" event={"ID":"1ccf5120-6d6b-49e6-ae2d-08464b3ab398","Type":"ContainerStarted","Data":"ef13cabbd0ca6e0cfa9b2e2ff8253bc1a22fcc82342e3a97bd221ace3dfe63b4"} Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.640857 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" event={"ID":"87a90f47-7e73-45ff-9f98-93bec3ebe12e","Type":"ContainerStarted","Data":"a770aa69d44aa7eb4ac598866673719230f63e5ea0ef1a415e89aba1d3c059f9"} Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.641017 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vgspd" podUID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerName="registry-server" containerID="cri-o://cc9e0ce148c1061ddb8530580be4223fa4e477660fe7f5db8b1f868d98d0db49" gracePeriod=2 Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.804918 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd"] Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.824144 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw"] Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.924257 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm"] Dec 10 07:04:19 crc kubenswrapper[4765]: W1210 07:04:19.925279 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb079565_c2aa_4756_b335_df9a6dac3758.slice/crio-f8d222213d68f0350fbe2e61a59e2471a8dbf243ebaee56c6eb34131c981873b WatchSource:0}: Error finding container f8d222213d68f0350fbe2e61a59e2471a8dbf243ebaee56c6eb34131c981873b: Status 404 returned error can't find the container with id f8d222213d68f0350fbe2e61a59e2471a8dbf243ebaee56c6eb34131c981873b Dec 10 07:04:19 crc kubenswrapper[4765]: I1210 07:04:19.987458 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7"] Dec 10 07:04:20 crc kubenswrapper[4765]: W1210 07:04:20.011205 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85f78838_e7c3_4c58_8be9_5cb847b81d6d.slice/crio-a944a9cfeb77b2b2f673f3b9066326c7bbafcb43d36f1741b4320252c9040fa8 WatchSource:0}: Error finding container a944a9cfeb77b2b2f673f3b9066326c7bbafcb43d36f1741b4320252c9040fa8: Status 404 returned error can't find the container with id 
a944a9cfeb77b2b2f673f3b9066326c7bbafcb43d36f1741b4320252c9040fa8 Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.193986 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t"] Dec 10 07:04:20 crc kubenswrapper[4765]: W1210 07:04:20.204926 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b065504_7b79_4bb0_b583_a37953820f14.slice/crio-6937d2830725f4a191fe31ce48ebb7893e84bc41b72f7a55142ed1153234e728 WatchSource:0}: Error finding container 6937d2830725f4a191fe31ce48ebb7893e84bc41b72f7a55142ed1153234e728: Status 404 returned error can't find the container with id 6937d2830725f4a191fe31ce48ebb7893e84bc41b72f7a55142ed1153234e728 Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.214804 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6"] Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.228866 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-p286q"] Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.247292 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf"] Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.267368 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt"] Dec 10 07:04:20 crc kubenswrapper[4765]: W1210 07:04:20.279205 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d2a8965_e959_4d48_bc75_e91d2c235898.slice/crio-77ea1c52b84eaaf5f9d0e77c42dd3ee669a12a47e86e0bbb19774c5de05431f1 WatchSource:0}: Error finding container 77ea1c52b84eaaf5f9d0e77c42dd3ee669a12a47e86e0bbb19774c5de05431f1: Status 404 returned error can't find the container with id 77ea1c52b84eaaf5f9d0e77c42dd3ee669a12a47e86e0bbb19774c5de05431f1 Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.283545 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js"] Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.306249 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-767vs"] Dec 10 07:04:20 crc kubenswrapper[4765]: W1210 07:04:20.310607 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a932c52_665f_4162_8f00_afdc61891dc5.slice/crio-2588e9e1acd71b02f82f3394bf86ca8a5d7bbd48930dcc1675e074c82aa5041b WatchSource:0}: Error finding container 2588e9e1acd71b02f82f3394bf86ca8a5d7bbd48930dcc1675e074c82aa5041b: Status 404 returned error can't find the container with id 2588e9e1acd71b02f82f3394bf86ca8a5d7bbd48930dcc1675e074c82aa5041b Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.322240 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-w8jlv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-tdvd4_openstack-operators(23fa0793-f896-4059-b33e-fe00ea97dbab): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.323105 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-whphn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-rg5bf_openstack-operators(29a0de10-d351-4d7c-9dfa-38e628ce116d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.324209 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv"] Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.324265 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" podUID="29a0de10-d351-4d7c-9dfa-38e628ce116d" Dec 10 07:04:20 crc kubenswrapper[4765]: W1210 07:04:20.325733 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68b73b9b_6bcb_4d42_a879_36107b59e8a8.slice/crio-6e4754f443e5e171e120b35136ef17d07334d4cc779c9fd6b2ed5ac0ebdb1f05 WatchSource:0}: Error finding container 6e4754f443e5e171e120b35136ef17d07334d4cc779c9fd6b2ed5ac0ebdb1f05: Status 404 returned error can't find the container with id 6e4754f443e5e171e120b35136ef17d07334d4cc779c9fd6b2ed5ac0ebdb1f05 Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.326280 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-w8jlv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-tdvd4_openstack-operators(23fa0793-f896-4059-b33e-fe00ea97dbab): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.326387 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z66pt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-kt2js_openstack-operators(9a932c52-665f-4162-8f00-afdc61891dc5): 
ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.327136 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hmnwz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-w92jp_openstack-operators(68b73b9b-6bcb-4d42-a879-36107b59e8a8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.328351 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" podUID="23fa0793-f896-4059-b33e-fe00ea97dbab" Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.329495 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: 
{{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hmnwz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-w92jp_openstack-operators(68b73b9b-6bcb-4d42-a879-36107b59e8a8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.329702 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z66pt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-kt2js_openstack-operators(9a932c52-665f-4162-8f00-afdc61891dc5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.331038 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" podUID="9a932c52-665f-4162-8f00-afdc61891dc5" Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.331110 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS 
exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" podUID="68b73b9b-6bcb-4d42-a879-36107b59e8a8" Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.332203 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf"] Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.344732 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv"] Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.352101 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4"] Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.372879 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-w92jp"] Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.638301 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.638510 4765 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.638587 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert podName:06b9f74b-2726-43cb-9353-dec4e4a34f01 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:24.638568524 +0000 UTC m=+984.365233840 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert") pod "infra-operator-controller-manager-78d48bff9d-7f8r9" (UID: "06b9f74b-2726-43cb-9353-dec4e4a34f01") : secret "infra-operator-webhook-server-cert" not found Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.655896 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv" event={"ID":"dbe2661b-7d79-49f0-9d56-5b66f440670f","Type":"ContainerStarted","Data":"b48da61c1e6906d7fbfeba0f767c1bfa58e28079408d446f6b706c138bea8dde"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.657341 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" event={"ID":"b9667d31-f2e0-4172-bdc6-c35854e9f81a","Type":"ContainerStarted","Data":"576013ed8e70fc7ea3a6a30e2e4bf96362ad7aba20e9542afd3c9e0412ac548c"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.658482 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" event={"ID":"eb907950-a4b3-4ba5-bea0-3075610995af","Type":"ContainerStarted","Data":"30e9bd23dd45a6782f7fafef9f759e7f23b7dd9900441b263f2a22b57354c5d2"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.661227 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgspd" event={"ID":"3d5b1a2a-4382-4aa4-9a12-676b126e421b","Type":"ContainerDied","Data":"cc9e0ce148c1061ddb8530580be4223fa4e477660fe7f5db8b1f868d98d0db49"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.661252 4765 generic.go:334] "Generic (PLEG): container finished" podID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerID="cc9e0ce148c1061ddb8530580be4223fa4e477660fe7f5db8b1f868d98d0db49" exitCode=0 Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.662659 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" event={"ID":"9b065504-7b79-4bb0-b583-a37953820f14","Type":"ContainerStarted","Data":"6937d2830725f4a191fe31ce48ebb7893e84bc41b72f7a55142ed1153234e728"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.663885 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" event={"ID":"eb079565-c2aa-4756-b335-df9a6dac3758","Type":"ContainerStarted","Data":"f8d222213d68f0350fbe2e61a59e2471a8dbf243ebaee56c6eb34131c981873b"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.665111 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt" event={"ID":"8d2a8965-e959-4d48-bc75-e91d2c235898","Type":"ContainerStarted","Data":"77ea1c52b84eaaf5f9d0e77c42dd3ee669a12a47e86e0bbb19774c5de05431f1"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.666956 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" event={"ID":"ccb3c4ba-9eab-4b59-85a4-e672a4310cf5","Type":"ContainerStarted","Data":"c15a682c886ffaf62a82fd8d6dc02f3fee7152f7ceec81595c0cefc20f24d6ae"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.668457 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" 
event={"ID":"44bbc33f-2848-42f4-b8e9-d99ba69ea07b","Type":"ContainerStarted","Data":"1f5a6a675f8fefa2221f7d20cfac72f9fc5d99870150778d364d27fc1fb51fd3"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.672125 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" event={"ID":"f9c242b2-1388-431e-8c76-1022426252c7","Type":"ContainerStarted","Data":"b339e976b70ea7b7001fd8e379983b2eab739bdb57752f24483611d95634e468"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.674065 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" event={"ID":"29a0de10-d351-4d7c-9dfa-38e628ce116d","Type":"ContainerStarted","Data":"23307822f7778c10fb417a49f2413a0b2462e37c05a618eb548e45d83f4e7a4d"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.676808 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" event={"ID":"06116776-17f7-42fd-a55c-8965a8932070","Type":"ContainerStarted","Data":"f7957f383fdfa75eeb0bbbade4748d3bc44be8dab264d8f3a532092bb7e1ce67"} Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.677425 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" podUID="29a0de10-d351-4d7c-9dfa-38e628ce116d" Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.679201 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" event={"ID":"68b73b9b-6bcb-4d42-a879-36107b59e8a8","Type":"ContainerStarted","Data":"6e4754f443e5e171e120b35136ef17d07334d4cc779c9fd6b2ed5ac0ebdb1f05"} Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.684170 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" podUID="68b73b9b-6bcb-4d42-a879-36107b59e8a8" Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.688322 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" event={"ID":"9a932c52-665f-4162-8f00-afdc61891dc5","Type":"ContainerStarted","Data":"2588e9e1acd71b02f82f3394bf86ca8a5d7bbd48930dcc1675e074c82aa5041b"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.689850 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" event={"ID":"85f78838-e7c3-4c58-8be9-5cb847b81d6d","Type":"ContainerStarted","Data":"a944a9cfeb77b2b2f673f3b9066326c7bbafcb43d36f1741b4320252c9040fa8"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.690832 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" event={"ID":"f883d81e-6c50-4d92-878d-253a954fcd7a","Type":"ContainerStarted","Data":"8534732dfc9a8081af562be865d6283b29423fff7945e3505de4e788d8e31cb9"} Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.692992 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" event={"ID":"23fa0793-f896-4059-b33e-fe00ea97dbab","Type":"ContainerStarted","Data":"9b1dd751b4cd1ec1058c139e4eb629e7c64beb23b7bc8d68c092f1159662984c"} Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.698634 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" podUID="23fa0793-f896-4059-b33e-fe00ea97dbab" Dec 10 07:04:20 crc kubenswrapper[4765]: E1210 07:04:20.699727 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" podUID="9a932c52-665f-4162-8f00-afdc61891dc5" Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.728441 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.739060 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-catalog-content\") pod \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.739215 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5zjv\" (UniqueName: \"kubernetes.io/projected/3d5b1a2a-4382-4aa4-9a12-676b126e421b-kube-api-access-f5zjv\") pod \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.739244 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-utilities\") pod \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\" (UID: \"3d5b1a2a-4382-4aa4-9a12-676b126e421b\") " Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.745078 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-utilities" (OuterVolumeSpecName: "utilities") pod "3d5b1a2a-4382-4aa4-9a12-676b126e421b" (UID: "3d5b1a2a-4382-4aa4-9a12-676b126e421b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.747522 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d5b1a2a-4382-4aa4-9a12-676b126e421b-kube-api-access-f5zjv" (OuterVolumeSpecName: "kube-api-access-f5zjv") pod "3d5b1a2a-4382-4aa4-9a12-676b126e421b" (UID: "3d5b1a2a-4382-4aa4-9a12-676b126e421b"). InnerVolumeSpecName "kube-api-access-f5zjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.841684 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5zjv\" (UniqueName: \"kubernetes.io/projected/3d5b1a2a-4382-4aa4-9a12-676b126e421b-kube-api-access-f5zjv\") on node \"crc\" DevicePath \"\"" Dec 10 07:04:20 crc kubenswrapper[4765]: I1210 07:04:20.841723 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.351866 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:21 crc kubenswrapper[4765]: E1210 07:04:21.352029 4765 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:21 crc kubenswrapper[4765]: E1210 07:04:21.352097 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert podName:32ede12b-66f5-42e9-8e6b-77a6e45c3099 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:25.352066095 +0000 UTC m=+985.078731411 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert") pod "openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" (UID: "32ede12b-66f5-42e9-8e6b-77a6e45c3099") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.420803 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d5b1a2a-4382-4aa4-9a12-676b126e421b" (UID: "3d5b1a2a-4382-4aa4-9a12-676b126e421b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.454222 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.454268 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.454402 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d5b1a2a-4382-4aa4-9a12-676b126e421b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:04:21 crc kubenswrapper[4765]: E1210 07:04:21.454416 4765 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 07:04:21 crc kubenswrapper[4765]: E1210 07:04:21.454491 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:25.45447133 +0000 UTC m=+985.181136646 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "webhook-server-cert" not found Dec 10 07:04:21 crc kubenswrapper[4765]: E1210 07:04:21.454492 4765 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 07:04:21 crc kubenswrapper[4765]: E1210 07:04:21.454549 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:25.454529341 +0000 UTC m=+985.181194757 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "metrics-server-cert" not found Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.715355 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vgspd" Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.715557 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgspd" event={"ID":"3d5b1a2a-4382-4aa4-9a12-676b126e421b","Type":"ContainerDied","Data":"548243920d61b7f60810a4c5c3cd36c332dd14928d5b8f80c708ebb7d8a59b39"} Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.716033 4765 scope.go:117] "RemoveContainer" containerID="cc9e0ce148c1061ddb8530580be4223fa4e477660fe7f5db8b1f868d98d0db49" Dec 10 07:04:21 crc kubenswrapper[4765]: E1210 07:04:21.719994 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" podUID="29a0de10-d351-4d7c-9dfa-38e628ce116d" Dec 10 07:04:21 crc kubenswrapper[4765]: E1210 07:04:21.719994 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" podUID="23fa0793-f896-4059-b33e-fe00ea97dbab" Dec 10 07:04:21 crc kubenswrapper[4765]: E1210 07:04:21.724977 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" podUID="68b73b9b-6bcb-4d42-a879-36107b59e8a8" Dec 10 07:04:21 crc kubenswrapper[4765]: E1210 07:04:21.732166 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" podUID="9a932c52-665f-4162-8f00-afdc61891dc5" Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.769509 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vgspd"] Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.773326 4765 scope.go:117] "RemoveContainer" containerID="789fd1afea22152608957718e26e26b002d1ee82226c3445dd48e3da7a2d5fc2" Dec 10 07:04:21 crc kubenswrapper[4765]: I1210 07:04:21.783955 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vgspd"] Dec 10 07:04:21 crc 
kubenswrapper[4765]: I1210 07:04:21.813481 4765 scope.go:117] "RemoveContainer" containerID="7a8fd38c1b75c2926625cfeba451de9ebd1d74ac0ab1dd4bcc30057f946ae9f2" Dec 10 07:04:22 crc kubenswrapper[4765]: I1210 07:04:22.614958 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" path="/var/lib/kubelet/pods/3d5b1a2a-4382-4aa4-9a12-676b126e421b/volumes" Dec 10 07:04:24 crc kubenswrapper[4765]: I1210 07:04:24.737387 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:24 crc kubenswrapper[4765]: E1210 07:04:24.737779 4765 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 07:04:24 crc kubenswrapper[4765]: E1210 07:04:24.737830 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert podName:06b9f74b-2726-43cb-9353-dec4e4a34f01 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:32.737813302 +0000 UTC m=+992.464478618 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert") pod "infra-operator-controller-manager-78d48bff9d-7f8r9" (UID: "06b9f74b-2726-43cb-9353-dec4e4a34f01") : secret "infra-operator-webhook-server-cert" not found Dec 10 07:04:25 crc kubenswrapper[4765]: I1210 07:04:25.361061 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:25 crc kubenswrapper[4765]: E1210 07:04:25.361203 4765 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:25 crc kubenswrapper[4765]: E1210 07:04:25.361666 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert podName:32ede12b-66f5-42e9-8e6b-77a6e45c3099 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:33.361612838 +0000 UTC m=+993.088278154 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert") pod "openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" (UID: "32ede12b-66f5-42e9-8e6b-77a6e45c3099") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:25 crc kubenswrapper[4765]: I1210 07:04:25.464988 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:25 crc kubenswrapper[4765]: I1210 07:04:25.466358 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:25 crc kubenswrapper[4765]: E1210 07:04:25.465988 4765 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 07:04:25 crc kubenswrapper[4765]: E1210 07:04:25.466538 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:33.466516744 +0000 UTC m=+993.193182060 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "metrics-server-cert" not found Dec 10 07:04:25 crc kubenswrapper[4765]: E1210 07:04:25.466477 4765 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 07:04:25 crc kubenswrapper[4765]: E1210 07:04:25.467270 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:33.467245035 +0000 UTC m=+993.193910351 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "webhook-server-cert" not found Dec 10 07:04:32 crc kubenswrapper[4765]: I1210 07:04:32.770188 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:32 crc kubenswrapper[4765]: I1210 07:04:32.776534 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06b9f74b-2726-43cb-9353-dec4e4a34f01-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7f8r9\" (UID: \"06b9f74b-2726-43cb-9353-dec4e4a34f01\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:32 crc kubenswrapper[4765]: I1210 07:04:32.923260 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:04:33 crc kubenswrapper[4765]: I1210 07:04:33.384709 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:04:33 crc kubenswrapper[4765]: E1210 07:04:33.384901 4765 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:33 crc kubenswrapper[4765]: E1210 07:04:33.385171 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert podName:32ede12b-66f5-42e9-8e6b-77a6e45c3099 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:49.385153963 +0000 UTC m=+1009.111819279 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert") pod "openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" (UID: "32ede12b-66f5-42e9-8e6b-77a6e45c3099") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 07:04:33 crc kubenswrapper[4765]: I1210 07:04:33.486921 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:33 crc kubenswrapper[4765]: I1210 07:04:33.487033 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" Dec 10 07:04:33 crc kubenswrapper[4765]: E1210 07:04:33.487136 4765 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 07:04:33 crc kubenswrapper[4765]: E1210 07:04:33.487231 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:49.487208269 +0000 UTC m=+1009.213873585 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "webhook-server-cert" not found Dec 10 07:04:33 crc kubenswrapper[4765]: E1210 07:04:33.487245 4765 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 07:04:33 crc kubenswrapper[4765]: E1210 07:04:33.487341 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs podName:b4131e33-88a7-4b19-8ffc-4029eec86cd3 nodeName:}" failed. No retries permitted until 2025-12-10 07:04:49.487321402 +0000 UTC m=+1009.213986718 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs") pod "openstack-operator-controller-manager-668858c49-rtprx" (UID: "b4131e33-88a7-4b19-8ffc-4029eec86cd3") : secret "metrics-server-cert" not found Dec 10 07:04:34 crc kubenswrapper[4765]: I1210 07:04:34.049440 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:04:34 crc kubenswrapper[4765]: I1210 07:04:34.049497 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:04:34 crc kubenswrapper[4765]: I1210 07:04:34.049555 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 07:04:34 crc kubenswrapper[4765]: I1210 07:04:34.050248 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1a2948aa41622b94e272b106fd506ab6099b9c866ae8f86fff9e5dbf9e54046e"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 07:04:34 crc kubenswrapper[4765]: I1210 07:04:34.050311 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://1a2948aa41622b94e272b106fd506ab6099b9c866ae8f86fff9e5dbf9e54046e" gracePeriod=600 Dec 10 07:04:34 crc kubenswrapper[4765]: I1210 07:04:34.240209 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="1a2948aa41622b94e272b106fd506ab6099b9c866ae8f86fff9e5dbf9e54046e" exitCode=0 Dec 10 07:04:34 crc kubenswrapper[4765]: I1210 07:04:34.240264 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"1a2948aa41622b94e272b106fd506ab6099b9c866ae8f86fff9e5dbf9e54046e"} Dec 10 07:04:34 crc kubenswrapper[4765]: I1210 07:04:34.240311 4765 scope.go:117] "RemoveContainer" containerID="c59ae59e8df6d4b5e877dd0073edd70b486d51e68aa6826855c2bd6ec77d4d30" Dec 10 07:04:35 crc kubenswrapper[4765]: I1210 07:04:35.662124 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 07:04:37 crc kubenswrapper[4765]: E1210 07:04:37.176937 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad" Dec 10 07:04:37 crc kubenswrapper[4765]: E1210 07:04:37.177416 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tcl5d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-79c8c4686c-mtl8t_openstack-operators(9b065504-7b79-4bb0-b583-a37953820f14): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:04:37 crc kubenswrapper[4765]: E1210 07:04:37.889538 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87" Dec 10 07:04:37 crc kubenswrapper[4765]: E1210 07:04:37.889824 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 
-3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-26smp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-967d97867-xqrjv_openstack-operators(f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:04:39 crc kubenswrapper[4765]: E1210 07:04:39.279389 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 10 07:04:39 crc kubenswrapper[4765]: E1210 07:04:39.279937 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2r9gl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-g5tv6_openstack-operators(eb907950-a4b3-4ba5-bea0-3075610995af): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:04:40 crc kubenswrapper[4765]: E1210 07:04:40.516036 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:900050d3501c0785b227db34b89883efe68247816e5c7427cacb74f8aa10605a" Dec 10 07:04:40 crc kubenswrapper[4765]: E1210 07:04:40.516274 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:900050d3501c0785b227db34b89883efe68247816e5c7427cacb74f8aa10605a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6gmdl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-697fb699cf-zd5jq_openstack-operators(98ff262d-fdd9-4e2e-9cd9-4f570716bf02): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:04:41 crc kubenswrapper[4765]: E1210 07:04:41.341065 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 10 07:04:41 crc kubenswrapper[4765]: E1210 07:04:41.341279 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r5b65,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-sfbj7_openstack-operators(85f78838-e7c3-4c58-8be9-5cb847b81d6d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:04:41 crc kubenswrapper[4765]: E1210 07:04:41.869273 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 10 07:04:41 crc kubenswrapper[4765]: E1210 07:04:41.869659 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nftz9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-mjmkd_openstack-operators(f883d81e-6c50-4d92-878d-253a954fcd7a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 07:04:43 crc kubenswrapper[4765]: E1210 07:04:43.675535 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429"
Dec 10 07:04:43 crc kubenswrapper[4765]: E1210 07:04:43.676466 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gg25q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-hbkmm_openstack-operators(eb079565-c2aa-4756-b335-df9a6dac3758): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 07:04:44 crc kubenswrapper[4765]: E1210 07:04:44.444326 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557"
Dec 10 07:04:44 crc kubenswrapper[4765]: E1210 07:04:44.444558 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-znxgb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-wrjkf_openstack-operators(44bbc33f-2848-42f4-b8e9-d99ba69ea07b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 07:04:45 crc kubenswrapper[4765]: E1210 07:04:45.002524 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670"
Dec 10 07:04:45 crc kubenswrapper[4765]: E1210 07:04:45.002996 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j9zfz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-767vs_openstack-operators(06116776-17f7-42fd-a55c-8965a8932070): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 07:04:45 crc kubenswrapper[4765]: E1210 07:04:45.833342 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7"
Dec 10 07:04:45 crc kubenswrapper[4765]: E1210 07:04:45.833623 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xxdx5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-25qhw_openstack-operators(f9c242b2-1388-431e-8c76-1022426252c7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 07:04:49 crc kubenswrapper[4765]: I1210 07:04:49.446363 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx"
Dec 10 07:04:49 crc kubenswrapper[4765]: I1210 07:04:49.451957 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/32ede12b-66f5-42e9-8e6b-77a6e45c3099-cert\") pod \"openstack-baremetal-operator-controller-manager-694d6cfbd628ppx\" (UID: \"32ede12b-66f5-42e9-8e6b-77a6e45c3099\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx"
Dec 10 07:04:49 crc kubenswrapper[4765]: I1210 07:04:49.547928 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx"
Dec 10 07:04:49 crc kubenswrapper[4765]: I1210 07:04:49.547998 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx"
Dec 10 07:04:49 crc kubenswrapper[4765]: I1210 07:04:49.553372 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-webhook-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx"
Dec 10 07:04:49 crc kubenswrapper[4765]: I1210 07:04:49.553473 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b4131e33-88a7-4b19-8ffc-4029eec86cd3-metrics-certs\") pod \"openstack-operator-controller-manager-668858c49-rtprx\" (UID: \"b4131e33-88a7-4b19-8ffc-4029eec86cd3\") " pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx"
Dec 10 07:04:49 crc kubenswrapper[4765]: I1210 07:04:49.615508 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx"
Dec 10 07:04:49 crc kubenswrapper[4765]: I1210 07:04:49.619935 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx"
Dec 10 07:04:50 crc kubenswrapper[4765]: E1210 07:04:50.748203 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2"
Dec 10 07:04:50 crc kubenswrapper[4765]: E1210 07:04:50.748393 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-whphn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-rg5bf_openstack-operators(29a0de10-d351-4d7c-9dfa-38e628ce116d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 07:04:50 crc kubenswrapper[4765]: E1210 07:04:50.749596 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" podUID="29a0de10-d351-4d7c-9dfa-38e628ce116d"
Dec 10 07:04:51 crc kubenswrapper[4765]: I1210 07:04:51.441448 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9"]
Dec 10 07:04:51 crc kubenswrapper[4765]: I1210 07:04:51.454183 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx"]
Dec 10 07:04:51 crc kubenswrapper[4765]: I1210 07:04:51.798174 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-668858c49-rtprx"]
Dec 10 07:04:51 crc kubenswrapper[4765]: W1210 07:04:51.862535 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32ede12b_66f5_42e9_8e6b_77a6e45c3099.slice/crio-b69b2e2b9df97d313427706013af27c07f791a8d89bfeccd8f52d5d87e5db339 WatchSource:0}: Error finding container b69b2e2b9df97d313427706013af27c07f791a8d89bfeccd8f52d5d87e5db339: Status 404 returned error can't find the container with id b69b2e2b9df97d313427706013af27c07f791a8d89bfeccd8f52d5d87e5db339
Dec 10 07:04:51 crc kubenswrapper[4765]: W1210 07:04:51.863183 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4131e33_88a7_4b19_8ffc_4029eec86cd3.slice/crio-07f86d8042e621ea53f48e4ea4dee4712e6c172eaed9d54f66554f54ccc06319 WatchSource:0}: Error finding container 07f86d8042e621ea53f48e4ea4dee4712e6c172eaed9d54f66554f54ccc06319: Status 404 returned error can't find the container with id 07f86d8042e621ea53f48e4ea4dee4712e6c172eaed9d54f66554f54ccc06319
Dec 10 07:04:52 crc kubenswrapper[4765]: I1210 07:04:52.433188 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" event={"ID":"06b9f74b-2726-43cb-9353-dec4e4a34f01","Type":"ContainerStarted","Data":"660079b433f7cd5d3e5cb8c70d7cfdafa3395f454a4d33b9008eafd341ab4562"}
Dec 10 07:04:52 crc kubenswrapper[4765]: I1210 07:04:52.434966 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" event={"ID":"ccb3c4ba-9eab-4b59-85a4-e672a4310cf5","Type":"ContainerStarted","Data":"00302d5d383d694856451c0ee2b34b9e6682d2323420ee09abc6e79f7f994348"}
Dec 10 07:04:52 crc kubenswrapper[4765]: I1210 07:04:52.437437 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt" event={"ID":"8d2a8965-e959-4d48-bc75-e91d2c235898","Type":"ContainerStarted","Data":"bbbded6186f636505d27c42e76308be3288499bb568a3210f760f688e6b52809"}
Dec 10 07:04:52 crc kubenswrapper[4765]: I1210 07:04:52.438534 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" event={"ID":"32ede12b-66f5-42e9-8e6b-77a6e45c3099","Type":"ContainerStarted","Data":"b69b2e2b9df97d313427706013af27c07f791a8d89bfeccd8f52d5d87e5db339"}
Dec 10 07:04:52 crc kubenswrapper[4765]: I1210 07:04:52.440157 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"8f3d21f8c3fe011f6de37bb9b8fe365dd62e648f60edb80df7c37bb446ad83d1"}
Dec 10 07:04:52 crc kubenswrapper[4765]: I1210 07:04:52.443560 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" event={"ID":"b4131e33-88a7-4b19-8ffc-4029eec86cd3","Type":"ContainerStarted","Data":"07f86d8042e621ea53f48e4ea4dee4712e6c172eaed9d54f66554f54ccc06319"}
Dec 10 07:04:52 crc kubenswrapper[4765]: I1210 07:04:52.446626 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" event={"ID":"b9667d31-f2e0-4172-bdc6-c35854e9f81a","Type":"ContainerStarted","Data":"dbdaca976a745439494d93227c927541ba2c39c87962fba4ad176973accef995"}
Dec 10 07:04:52 crc kubenswrapper[4765]: I1210 07:04:52.448614 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" event={"ID":"1ccf5120-6d6b-49e6-ae2d-08464b3ab398","Type":"ContainerStarted","Data":"b95f8ffbd932fe00e8b366239c31d552b74d28bbe5c9ccd486caab3be1b40c34"}
Dec 10 07:04:52 crc kubenswrapper[4765]: I1210 07:04:52.450749 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" event={"ID":"87a90f47-7e73-45ff-9f98-93bec3ebe12e","Type":"ContainerStarted","Data":"6b57591fb10fa3c0195ecbef7b6c83f100df90fe4757a54f55924844c81233c0"}
Dec 10 07:04:53 crc kubenswrapper[4765]: I1210 07:04:53.456685 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv" event={"ID":"dbe2661b-7d79-49f0-9d56-5b66f440670f","Type":"ContainerStarted","Data":"4b673eea24ddaaea4e4402fdd4a2bcde01f56f7932560182d208d7ac07504ff0"}
Dec 10 07:04:53 crc kubenswrapper[4765]: I1210 07:04:53.459287 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" event={"ID":"23fa0793-f896-4059-b33e-fe00ea97dbab","Type":"ContainerStarted","Data":"bff190fe1ac8ce33d0d641b6bd80d44bfbc210c87aa23c26c09557155b635e78"}
Dec 10 07:04:54 crc kubenswrapper[4765]: I1210 07:04:54.468436 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" event={"ID":"b4131e33-88a7-4b19-8ffc-4029eec86cd3","Type":"ContainerStarted","Data":"50fa1d2203a89e0cfc29eec19acffd96e11ab3611cd44d5519a331b3f49416d2"}
Dec 10 07:04:54 crc kubenswrapper[4765]: I1210 07:04:54.470269 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx"
Dec 10 07:04:54 crc kubenswrapper[4765]: I1210 07:04:54.739023 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx" podStartSLOduration=37.738994946 podStartE2EDuration="37.738994946s" podCreationTimestamp="2025-12-10 07:04:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:04:54.736062252 +0000 UTC m=+1014.462727568" watchObservedRunningTime="2025-12-10 07:04:54.738994946 +0000 UTC m=+1014.465660272"
Dec 10 07:04:59 crc kubenswrapper[4765]: I1210 07:04:59.625945 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-668858c49-rtprx"
Dec 10 07:05:04 crc kubenswrapper[4765]: E1210 07:05:04.589586 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" podUID="29a0de10-d351-4d7c-9dfa-38e628ce116d"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.993503 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.994527 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.995633 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zc7q2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-6c677c69b-kgvfr_openstack-operators(1ccf5120-6d6b-49e6-ae2d-08464b3ab398): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.995729 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qmsfr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-p286q_openstack-operators(ccb3c4ba-9eab-4b59-85a4-e672a4310cf5): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.996524 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.996753 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mtck5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-5697bb5779-9jg4f_openstack-operators(87a90f47-7e73-45ff-9f98-93bec3ebe12e): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.997018 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.997231 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-drlkm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-667bd8d554-dxgfv_openstack-operators(b9667d31-f2e0-4172-bdc6-c35854e9f81a): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.998329 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" podUID="1ccf5120-6d6b-49e6-ae2d-08464b3ab398"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.998428 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" podUID="87a90f47-7e73-45ff-9f98-93bec3ebe12e"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.998508 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" podUID="b9667d31-f2e0-4172-bdc6-c35854e9f81a"
Dec 10 07:05:06 crc kubenswrapper[4765]: E1210 07:05:06.999420 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" podUID="ccb3c4ba-9eab-4b59-85a4-e672a4310cf5"
Dec 10 07:05:07 crc kubenswrapper[4765]: I1210 07:05:07.557963 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr"
Dec 10 07:05:07 crc kubenswrapper[4765]: I1210 07:05:07.558284 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q"
Dec 10 07:05:07 crc kubenswrapper[4765]: I1210 07:05:07.558298 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv"
Dec 10 07:05:07 crc kubenswrapper[4765]: I1210 07:05:07.560864 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr"
Dec 10 07:05:07 crc kubenswrapper[4765]: E1210 07:05:07.561021 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" podUID="ccb3c4ba-9eab-4b59-85a4-e672a4310cf5"
Dec 10 07:05:07 crc kubenswrapper[4765]: I1210 07:05:07.561130 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv"
Dec 10 07:05:07 crc kubenswrapper[4765]: E1210 07:05:07.561274 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" podUID="1ccf5120-6d6b-49e6-ae2d-08464b3ab398"
Dec 10 07:05:07 crc kubenswrapper[4765]: E1210 07:05:07.561310 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" podUID="87a90f47-7e73-45ff-9f98-93bec3ebe12e"
Dec 10 07:05:07 crc kubenswrapper[4765]: I1210 07:05:07.561349 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q"
Dec 10 07:05:07 crc kubenswrapper[4765]: E1210 07:05:07.561586 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" podUID="b9667d31-f2e0-4172-bdc6-c35854e9f81a"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.120718 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.120887 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-znxgb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-wrjkf_openstack-operators(44bbc33f-2848-42f4-b8e9-d99ba69ea07b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.122078 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" podUID="44bbc33f-2848-42f4-b8e9-d99ba69ea07b"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.137514 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" podUID="f9c242b2-1388-431e-8c76-1022426252c7"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.464578 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.465130 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tcl5d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-79c8c4686c-mtl8t_openstack-operators(9b065504-7b79-4bb0-b583-a37953820f14): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.466344 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" podUID="9b065504-7b79-4bb0-b583-a37953820f14"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.468850 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" podUID="f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720"
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.564919 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" event={"ID":"f9c242b2-1388-431e-8c76-1022426252c7","Type":"ContainerStarted","Data":"43fe0e91e2ee394856a53dcfa20fee22664a132dfb8e26c9e164cb0a14a4a695"}
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.566833 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" event={"ID":"f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720","Type":"ContainerStarted","Data":"22859a127fe49ae016bfba8807db84437785f02eacc7044d20dfe22b88ec9b0b"}
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.575738 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" event={"ID":"68b73b9b-6bcb-4d42-a879-36107b59e8a8","Type":"ContainerStarted","Data":"93003881c3338ed186ebc9e6cc29b01de1c547623bb020308f2c672d66648f92"}
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.583589 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt" event={"ID":"8d2a8965-e959-4d48-bc75-e91d2c235898","Type":"ContainerStarted","Data":"186693a46acaabf020941d631aa8fb501f26d9fc044650dcdf014d59380bfbcc"}
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.584650 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt"
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.607203 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" event={"ID":"23fa0793-f896-4059-b33e-fe00ea97dbab","Type":"ContainerStarted","Data":"2427e0224dbaf70ac3c3564371cb7adc04c3b0d7309c55b51e6a669a8297dd11"}
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.607237 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" event={"ID":"9a932c52-665f-4162-8f00-afdc61891dc5","Type":"ContainerStarted","Data":"4a489fdada9a654f09b002904a2dc0024e11015bf1c859f72216bf35ba7dd720"}
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.607286 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt"
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.607300 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4"
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.607320 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4"
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.621221 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-4j6rt" podStartSLOduration=4.802995635 podStartE2EDuration="52.621200292s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.285074612 +0000 UTC m=+980.011739928" lastFinishedPulling="2025-12-10 07:05:08.103279269 +0000 UTC m=+1027.829944585" observedRunningTime="2025-12-10 07:05:08.616865058 +0000 UTC m=+1028.343530374" watchObservedRunningTime="2025-12-10 07:05:08.621200292 +0000 UTC m=+1028.347865608"
Dec 10 07:05:08 crc kubenswrapper[4765]: I1210 07:05:08.646156 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-tdvd4" podStartSLOduration=5.008590486 podStartE2EDuration="52.646132781s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.322099196 +0000 UTC m=+980.048764512" lastFinishedPulling="2025-12-10 07:05:07.959641501 +0000 UTC m=+1027.686306807" observedRunningTime="2025-12-10 07:05:08.637109124 +0000 UTC m=+1028.363774440" watchObservedRunningTime="2025-12-10 07:05:08.646132781 +0000 UTC m=+1028.372798087"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.820941 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.821111 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2r9gl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-g5tv6_openstack-operators(eb907950-a4b3-4ba5-bea0-3075610995af): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 07:05:08 crc kubenswrapper[4765]: E1210 07:05:08.822816 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" podUID="eb907950-a4b3-4ba5-bea0-3075610995af"
Dec 10 07:05:09 crc kubenswrapper[4765]: E1210 07:05:09.365899 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 10 07:05:09 crc kubenswrapper[4765]: E1210 07:05:09.366334 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gg25q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-hbkmm_openstack-operators(eb079565-c2aa-4756-b335-df9a6dac3758): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 07:05:09 crc kubenswrapper[4765]: E1210 07:05:09.367486 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" podUID="eb079565-c2aa-4756-b335-df9a6dac3758"
Dec 10 07:05:09 crc kubenswrapper[4765]: E1210 07:05:09.629931 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = writing blob: storing blob to file \"/var/tmp/container_images_storage1140133407/9\": happened during read: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 10 07:05:09 crc kubenswrapper[4765]: E1210 07:05:09.631133 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j9zfz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-767vs_openstack-operators(06116776-17f7-42fd-a55c-8965a8932070): ErrImagePull: rpc error: code = Canceled desc = writing blob: storing blob to file \"/var/tmp/container_images_storage1140133407/9\": happened during read: context canceled" logger="UnhandledError"
Dec 10 07:05:09 crc kubenswrapper[4765]: E1210 07:05:09.632719 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = writing blob: storing blob to file \\\"/var/tmp/container_images_storage1140133407/9\\\": happened during read: context canceled\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" podUID="06116776-17f7-42fd-a55c-8965a8932070"
Dec 10 07:05:09 crc kubenswrapper[4765]: E1210 07:05:09.730496 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"
Dec 10 07:05:09 crc kubenswrapper[4765]: E1210 07:05:09.730642 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r5b65,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-sfbj7_openstack-operators(85f78838-e7c3-4c58-8be9-5cb847b81d6d): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError"
Dec 10 07:05:09 crc kubenswrapper[4765]: E1210 07:05:09.736941 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" podUID="85f78838-e7c3-4c58-8be9-5cb847b81d6d"
Dec 10 07:05:10 crc kubenswrapper[4765]: I1210 07:05:10.615897 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" event={"ID":"ccb3c4ba-9eab-4b59-85a4-e672a4310cf5","Type":"ContainerStarted","Data":"3c257072a6d74910f66bd68d2d0f7ef80919f935a1987121fb8676e428e2a078"}
Dec 10 07:05:11 crc kubenswrapper[4765]: I1210 07:05:11.626850 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" event={"ID":"b9667d31-f2e0-4172-bdc6-c35854e9f81a","Type":"ContainerStarted","Data":"fe3d1a25a57f691308da2cfc248415934f9121fa9c8c54137a6146661c42f21b"}
Dec 10 07:05:11 crc kubenswrapper[4765]: I1210 07:05:11.628925 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" event={"ID":"1ccf5120-6d6b-49e6-ae2d-08464b3ab398","Type":"ContainerStarted","Data":"44b1bc424a9e66cfbde3d99dfb47609e9d74e52bab3901b2649cddc48f50cf47"}
Dec 10 07:05:11 crc kubenswrapper[4765]: I1210 07:05:11.631334 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" event={"ID":"9a932c52-665f-4162-8f00-afdc61891dc5","Type":"ContainerStarted","Data":"72dfbac22932dbe492d6aa50130a5f11f40b7c83b3d9b9e8ded9a34a328d8922"}
Dec 10 07:05:11 crc kubenswrapper[4765]: I1210 07:05:11.631364 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js"
Dec 10 07:05:11 crc kubenswrapper[4765]: I1210 07:05:11.648920 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-p286q" podStartSLOduration=29.671578306 podStartE2EDuration="55.648901748s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.276943421 +0000 UTC m=+980.003608737" lastFinishedPulling="2025-12-10 07:04:46.254266863 +0000 UTC m=+1005.980932179" observedRunningTime="2025-12-10 07:05:10.757619147 +0000 UTC m=+1030.484284453" watchObservedRunningTime="2025-12-10 07:05:11.648901748 +0000 UTC m=+1031.375567064"
Dec 10 07:05:11 crc kubenswrapper[4765]: I1210 07:05:11.655492 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-dxgfv" podStartSLOduration=28.723154584 podStartE2EDuration="54.655472175s" podCreationTimestamp="2025-12-10 07:04:17 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.322058265 +0000 UTC m=+980.048723571" lastFinishedPulling="2025-12-10 07:04:46.254375836 +0000 UTC m=+1005.981041162" observedRunningTime="2025-12-10 07:05:11.648011803 +0000 UTC m=+1031.374677119" watchObservedRunningTime="2025-12-10 07:05:11.655472175 +0000 UTC m=+1031.382137491"
Dec 10 07:05:11 crc kubenswrapper[4765]: I1210 07:05:11.684786 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" podStartSLOduration=6.274164763 podStartE2EDuration="55.684763519s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.326321666 +0000 UTC m=+980.052986982" lastFinishedPulling="2025-12-10 07:05:09.736920422 +0000 UTC m=+1029.463585738" observedRunningTime="2025-12-10 07:05:11.682602007 +0000 UTC m=+1031.409267323" watchObservedRunningTime="2025-12-10 07:05:11.684763519 +0000 UTC m=+1031.411428835"
Dec 10 07:05:11 crc kubenswrapper[4765]: I1210 07:05:11.707283 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-kgvfr" podStartSLOduration=28.366487205 podStartE2EDuration="55.707260298s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:18.912415959 +0000 UTC m=+978.639081275" lastFinishedPulling="2025-12-10 07:04:46.253189052 +0000 UTC m=+1005.979854368" observedRunningTime="2025-12-10 07:05:11.700808525 +0000 UTC m=+1031.427473841" watchObservedRunningTime="2025-12-10 07:05:11.707260298 +0000 UTC m=+1031.433925614"
Dec 10 07:05:12 crc kubenswrapper[4765]: E1210 07:05:12.039876 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" podUID="f883d81e-6c50-4d92-878d-253a954fcd7a"
Dec 10 07:05:12 crc kubenswrapper[4765]: E1210 07:05:12.593073 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" podUID="98ff262d-fdd9-4e2e-9cd9-4f570716bf02"
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.754992 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" event={"ID":"32ede12b-66f5-42e9-8e6b-77a6e45c3099","Type":"ContainerStarted","Data":"0d0799197eb3e4fd0e8e75a1226a93d3cfaa70318e5ffc8a5572b46d27430275"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.756870 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" event={"ID":"9b065504-7b79-4bb0-b583-a37953820f14","Type":"ContainerStarted","Data":"dd1d2eda6be1d29ace1e6d39b95f1fc0d113b9d0969b5ed3f847f91ec9f065a4"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.759535 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" event={"ID":"06b9f74b-2726-43cb-9353-dec4e4a34f01","Type":"ContainerStarted","Data":"911c8a78c40c1ba663c34c0e040b2d1098e30ce7c66e5d401f6c38627043a292"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.760912 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" event={"ID":"06116776-17f7-42fd-a55c-8965a8932070","Type":"ContainerStarted","Data":"9341e4b9fd13aa4439ccdae6278c7a4fe4eb622060879b415eb1ba6b7511be35"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.764499 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" event={"ID":"68b73b9b-6bcb-4d42-a879-36107b59e8a8","Type":"ContainerStarted","Data":"966af9843a7a2c4eea0debcc6a8ef9f2d3db43919a896b62614b90864fd17ee5"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.766112 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp"
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.768716 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp"
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.771585 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" event={"ID":"44bbc33f-2848-42f4-b8e9-d99ba69ea07b","Type":"ContainerStarted","Data":"6154e96a5339bb59c4fc06b8b024aa272d344656ee5708e634dc7234e6212a4f"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.780714 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv" event={"ID":"dbe2661b-7d79-49f0-9d56-5b66f440670f","Type":"ContainerStarted","Data":"9a542e83304876eecf1e93679bf60c3bd83e89a45068eb5489bb09ddcfa434ae"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.782461 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv"
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.789548 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv"
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.800302 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw"
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.805727 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" event={"ID":"98ff262d-fdd9-4e2e-9cd9-4f570716bf02","Type":"ContainerStarted","Data":"336f829fe9ee26a6abb19308aaf2066b45f126760f732769c33f2bca8aa8ccb5"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.819500 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-w92jp" podStartSLOduration=7.35111527 podStartE2EDuration="56.819477189s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.326999576 +0000 UTC m=+980.053664882" lastFinishedPulling="2025-12-10 07:05:09.795361485 +0000 UTC m=+1029.522026801" observedRunningTime="2025-12-10 07:05:12.816461963 +0000 UTC m=+1032.543127279" watchObservedRunningTime="2025-12-10 07:05:12.819477189 +0000 UTC m=+1032.546142505"
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.823074 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" event={"ID":"f883d81e-6c50-4d92-878d-253a954fcd7a","Type":"ContainerStarted","Data":"6502e94023568a280b00acc051251cc2017b4f6b959f399f1bba10818d097552"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.859453 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" event={"ID":"f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720","Type":"ContainerStarted","Data":"0e00e106dc77a99be8d9fac9346e0fbd69bdee6f4f7206588c066a4d29d1e0cc"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.859812 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv"
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.865438 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" podStartSLOduration=4.87869931 podStartE2EDuration="56.865420576s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:19.824996606 +0000 UTC m=+979.551661922" lastFinishedPulling="2025-12-10 07:05:11.811717872 +0000 UTC m=+1031.538383188" observedRunningTime="2025-12-10 07:05:12.86415715 +0000 UTC m=+1032.590822476" watchObservedRunningTime="2025-12-10 07:05:12.865420576 +0000 UTC m=+1032.592085892"
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.869966 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" event={"ID":"eb079565-c2aa-4756-b335-df9a6dac3758","Type":"ContainerStarted","Data":"b86dcb02e25fa4ac2a0757f7e7d6a6e3acc5d74f8bad33212ecadc6b1928501d"}
Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.874237 4765 kubelet.go:2542] "SyncLoop (probe)"
probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-kt2js" Dec 10 07:05:12 crc kubenswrapper[4765]: I1210 07:05:12.967757 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-pwpkv" podStartSLOduration=7.742625763 podStartE2EDuration="56.967736069s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.321647343 +0000 UTC m=+980.048312659" lastFinishedPulling="2025-12-10 07:05:09.546757649 +0000 UTC m=+1029.273422965" observedRunningTime="2025-12-10 07:05:12.961681577 +0000 UTC m=+1032.688346893" watchObservedRunningTime="2025-12-10 07:05:12.967736069 +0000 UTC m=+1032.694401385" Dec 10 07:05:13 crc kubenswrapper[4765]: I1210 07:05:13.131257 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" podStartSLOduration=6.430772661 podStartE2EDuration="57.131233773s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:19.091433375 +0000 UTC m=+978.818098691" lastFinishedPulling="2025-12-10 07:05:09.791894477 +0000 UTC m=+1029.518559803" observedRunningTime="2025-12-10 07:05:13.005812373 +0000 UTC m=+1032.732477729" watchObservedRunningTime="2025-12-10 07:05:13.131233773 +0000 UTC m=+1032.857899089" Dec 10 07:05:13 crc kubenswrapper[4765]: I1210 07:05:13.977765 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" event={"ID":"06116776-17f7-42fd-a55c-8965a8932070","Type":"ContainerStarted","Data":"e88a621eb92eea1d747dd4aac889d2838762a149367f8331c6ead2d12315ad4f"} Dec 10 07:05:13 crc kubenswrapper[4765]: I1210 07:05:13.977843 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.005146 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" event={"ID":"eb079565-c2aa-4756-b335-df9a6dac3758","Type":"ContainerStarted","Data":"91c43d427a8bc2cf16ed89c1e95d91e63dd400ce4aceae9f109dd7dcfb9950a7"} Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.006067 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.048149 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" event={"ID":"32ede12b-66f5-42e9-8e6b-77a6e45c3099","Type":"ContainerStarted","Data":"96a4a5876c507708bd6e630f1275311d1596cc01688462cf3f65bffd67e450ce"} Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.049378 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.063961 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sw27b"] Dec 10 07:05:14 crc kubenswrapper[4765]: E1210 07:05:14.064292 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerName="extract-utilities" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 
07:05:14.064309 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerName="extract-utilities" Dec 10 07:05:14 crc kubenswrapper[4765]: E1210 07:05:14.064334 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerName="extract-content" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.064341 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerName="extract-content" Dec 10 07:05:14 crc kubenswrapper[4765]: E1210 07:05:14.064361 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerName="registry-server" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.064368 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerName="registry-server" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.064520 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d5b1a2a-4382-4aa4-9a12-676b126e421b" containerName="registry-server" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.065709 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.072498 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" event={"ID":"f9c242b2-1388-431e-8c76-1022426252c7","Type":"ContainerStarted","Data":"f2087f1deedda0b32d0d3de0cb396f10b641da96ae75719ba108b5fafb20c759"} Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.089205 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sw27b"] Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.097811 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" event={"ID":"85f78838-e7c3-4c58-8be9-5cb847b81d6d","Type":"ContainerStarted","Data":"bc0d055eeac9a093bac7aee113bfdf98d73aa1af535fa4912bbe55acef6a565a"} Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.138394 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" event={"ID":"06b9f74b-2726-43cb-9353-dec4e4a34f01","Type":"ContainerStarted","Data":"b62d952e4c000a4f3a205821cc10080588360416677024e9d91ad7af6d19bc9f"} Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.138665 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.340109 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-utilities\") pod \"redhat-marketplace-sw27b\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.340158 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr9q4\" (UniqueName: \"kubernetes.io/projected/23f1cedf-5038-44e0-93bb-415fa134eddf-kube-api-access-gr9q4\") pod \"redhat-marketplace-sw27b\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " 
pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.340238 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-catalog-content\") pod \"redhat-marketplace-sw27b\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.340728 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" event={"ID":"9b065504-7b79-4bb0-b583-a37953820f14","Type":"ContainerStarted","Data":"4a19224f83c316b890729370653610d167a7dffe77a1c968fdb8f517411d8f79"} Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.341242 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.346349 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" event={"ID":"eb907950-a4b3-4ba5-bea0-3075610995af","Type":"ContainerStarted","Data":"e5f1fc9e6247c88bb2713590335bc3b40949cf4c22a2998277b6908ee72970cd"} Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.346389 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" event={"ID":"eb907950-a4b3-4ba5-bea0-3075610995af","Type":"ContainerStarted","Data":"259c14276f8a5ed5b3af6e2023b798ad2012ea3afb9e43ccc6c6a41cb45574dd"} Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.347016 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.352509 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" event={"ID":"44bbc33f-2848-42f4-b8e9-d99ba69ea07b","Type":"ContainerStarted","Data":"23d35f7c9d2e13e7b4461f3021c67ec8815030a6bd393808a8d5695d5b5df870"} Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.352984 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.398594 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" podStartSLOduration=7.040076355 podStartE2EDuration="58.398575099s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.316399954 +0000 UTC m=+980.043065270" lastFinishedPulling="2025-12-10 07:05:11.674898698 +0000 UTC m=+1031.401564014" observedRunningTime="2025-12-10 07:05:14.098133807 +0000 UTC m=+1033.824799133" watchObservedRunningTime="2025-12-10 07:05:14.398575099 +0000 UTC m=+1034.125240415" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.435425 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" podStartSLOduration=6.669494878 podStartE2EDuration="58.435409048s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:19.927393141 +0000 UTC m=+979.654058457" 
lastFinishedPulling="2025-12-10 07:05:11.693307311 +0000 UTC m=+1031.419972627" observedRunningTime="2025-12-10 07:05:14.432781443 +0000 UTC m=+1034.159446769" watchObservedRunningTime="2025-12-10 07:05:14.435409048 +0000 UTC m=+1034.162074364" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.441614 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr9q4\" (UniqueName: \"kubernetes.io/projected/23f1cedf-5038-44e0-93bb-415fa134eddf-kube-api-access-gr9q4\") pod \"redhat-marketplace-sw27b\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.441660 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-utilities\") pod \"redhat-marketplace-sw27b\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.441706 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-catalog-content\") pod \"redhat-marketplace-sw27b\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.442178 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-catalog-content\") pod \"redhat-marketplace-sw27b\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.442630 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-utilities\") pod \"redhat-marketplace-sw27b\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.549476 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" podStartSLOduration=40.629763818 podStartE2EDuration="58.549460544s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:51.865183931 +0000 UTC m=+1011.591849247" lastFinishedPulling="2025-12-10 07:05:09.784880657 +0000 UTC m=+1029.511545973" observedRunningTime="2025-12-10 07:05:14.54054539 +0000 UTC m=+1034.267210726" watchObservedRunningTime="2025-12-10 07:05:14.549460544 +0000 UTC m=+1034.276125870" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.563825 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr9q4\" (UniqueName: \"kubernetes.io/projected/23f1cedf-5038-44e0-93bb-415fa134eddf-kube-api-access-gr9q4\") pod \"redhat-marketplace-sw27b\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.602336 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" podStartSLOduration=7.156750857 podStartE2EDuration="58.602320029s" 
podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.24531372 +0000 UTC m=+979.971979026" lastFinishedPulling="2025-12-10 07:05:11.690882882 +0000 UTC m=+1031.417548198" observedRunningTime="2025-12-10 07:05:14.59569672 +0000 UTC m=+1034.322362036" watchObservedRunningTime="2025-12-10 07:05:14.602320029 +0000 UTC m=+1034.328985345" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.652771 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" podStartSLOduration=9.069437492 podStartE2EDuration="58.652747144s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.208442091 +0000 UTC m=+979.935107407" lastFinishedPulling="2025-12-10 07:05:09.791751743 +0000 UTC m=+1029.518417059" observedRunningTime="2025-12-10 07:05:14.646026283 +0000 UTC m=+1034.372691599" watchObservedRunningTime="2025-12-10 07:05:14.652747144 +0000 UTC m=+1034.379412460" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.661439 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.702291 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" podStartSLOduration=38.876685236 podStartE2EDuration="58.702277644s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:51.868635689 +0000 UTC m=+1011.595301005" lastFinishedPulling="2025-12-10 07:05:11.694228097 +0000 UTC m=+1031.420893413" observedRunningTime="2025-12-10 07:05:14.697377935 +0000 UTC m=+1034.424043251" watchObservedRunningTime="2025-12-10 07:05:14.702277644 +0000 UTC m=+1034.428942950" Dec 10 07:05:14 crc kubenswrapper[4765]: I1210 07:05:14.726805 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" podStartSLOduration=9.229381046 podStartE2EDuration="58.726791132s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.294300275 +0000 UTC m=+980.020965591" lastFinishedPulling="2025-12-10 07:05:09.791710371 +0000 UTC m=+1029.518375677" observedRunningTime="2025-12-10 07:05:14.724516297 +0000 UTC m=+1034.451181613" watchObservedRunningTime="2025-12-10 07:05:14.726791132 +0000 UTC m=+1034.453456448" Dec 10 07:05:15 crc kubenswrapper[4765]: I1210 07:05:15.441331 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" event={"ID":"85f78838-e7c3-4c58-8be9-5cb847b81d6d","Type":"ContainerStarted","Data":"fe3e87a96d3a8e26d4244679d5372cb7a53e143d72af9643cddde9df729a311e"} Dec 10 07:05:15 crc kubenswrapper[4765]: I1210 07:05:15.470750 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" Dec 10 07:05:15 crc kubenswrapper[4765]: I1210 07:05:15.511461 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" podStartSLOduration=7.399154378 podStartE2EDuration="59.511435537s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.012855074 +0000 UTC m=+979.739520390" lastFinishedPulling="2025-12-10 
07:05:12.125136233 +0000 UTC m=+1031.851801549" observedRunningTime="2025-12-10 07:05:15.506408224 +0000 UTC m=+1035.233073560" watchObservedRunningTime="2025-12-10 07:05:15.511435537 +0000 UTC m=+1035.238100863" Dec 10 07:05:15 crc kubenswrapper[4765]: I1210 07:05:15.752150 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" event={"ID":"98ff262d-fdd9-4e2e-9cd9-4f570716bf02","Type":"ContainerStarted","Data":"123a30cab94481fa8a4e812c926475bef3d6ff986688a6a65c58807a271a6191"} Dec 10 07:05:15 crc kubenswrapper[4765]: I1210 07:05:15.753678 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" Dec 10 07:05:15 crc kubenswrapper[4765]: I1210 07:05:15.755905 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" event={"ID":"f883d81e-6c50-4d92-878d-253a954fcd7a","Type":"ContainerStarted","Data":"6f88984b5e332bc1487d92528b89ef84ae199e982387bb3724a023d85dc60350"} Dec 10 07:05:15 crc kubenswrapper[4765]: I1210 07:05:15.810408 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" podStartSLOduration=3.633047933 podStartE2EDuration="59.810381627s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:17.789640649 +0000 UTC m=+977.516305965" lastFinishedPulling="2025-12-10 07:05:13.966974343 +0000 UTC m=+1033.693639659" observedRunningTime="2025-12-10 07:05:15.787520096 +0000 UTC m=+1035.514185412" watchObservedRunningTime="2025-12-10 07:05:15.810381627 +0000 UTC m=+1035.537046943" Dec 10 07:05:15 crc kubenswrapper[4765]: I1210 07:05:15.822329 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sw27b"] Dec 10 07:05:15 crc kubenswrapper[4765]: I1210 07:05:15.832944 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" podStartSLOduration=6.079716618 podStartE2EDuration="59.832921428s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:19.814408535 +0000 UTC m=+979.541073851" lastFinishedPulling="2025-12-10 07:05:13.567613335 +0000 UTC m=+1033.294278661" observedRunningTime="2025-12-10 07:05:15.819404874 +0000 UTC m=+1035.546070190" watchObservedRunningTime="2025-12-10 07:05:15.832921428 +0000 UTC m=+1035.559586744" Dec 10 07:05:15 crc kubenswrapper[4765]: W1210 07:05:15.865462 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23f1cedf_5038_44e0_93bb_415fa134eddf.slice/crio-d31ec55358cc681e01e958e1547ddd23916d8f66fde149fc208f877702f393ef WatchSource:0}: Error finding container d31ec55358cc681e01e958e1547ddd23916d8f66fde149fc208f877702f393ef: Status 404 returned error can't find the container with id d31ec55358cc681e01e958e1547ddd23916d8f66fde149fc208f877702f393ef Dec 10 07:05:16 crc kubenswrapper[4765]: I1210 07:05:16.790119 4765 generic.go:334] "Generic (PLEG): container finished" podID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerID="33df6f0af2ce527343d877c99fdad04d3ff0b5f85fee7c753b17aafaf0b7ba79" exitCode=0 Dec 10 07:05:16 crc kubenswrapper[4765]: I1210 07:05:16.791316 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-sw27b" event={"ID":"23f1cedf-5038-44e0-93bb-415fa134eddf","Type":"ContainerDied","Data":"33df6f0af2ce527343d877c99fdad04d3ff0b5f85fee7c753b17aafaf0b7ba79"} Dec 10 07:05:16 crc kubenswrapper[4765]: I1210 07:05:16.791342 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sw27b" event={"ID":"23f1cedf-5038-44e0-93bb-415fa134eddf","Type":"ContainerStarted","Data":"d31ec55358cc681e01e958e1547ddd23916d8f66fde149fc208f877702f393ef"} Dec 10 07:05:16 crc kubenswrapper[4765]: I1210 07:05:16.792786 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" Dec 10 07:05:16 crc kubenswrapper[4765]: I1210 07:05:16.849488 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" Dec 10 07:05:16 crc kubenswrapper[4765]: I1210 07:05:16.852890 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" Dec 10 07:05:17 crc kubenswrapper[4765]: I1210 07:05:17.102789 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-967d97867-xqrjv" Dec 10 07:05:17 crc kubenswrapper[4765]: I1210 07:05:17.622853 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-25qhw" Dec 10 07:05:17 crc kubenswrapper[4765]: I1210 07:05:17.653352 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wrjkf" Dec 10 07:05:17 crc kubenswrapper[4765]: I1210 07:05:17.744499 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-767vs" Dec 10 07:05:17 crc kubenswrapper[4765]: I1210 07:05:17.745004 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-mtl8t" Dec 10 07:05:17 crc kubenswrapper[4765]: I1210 07:05:17.769811 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-g5tv6" Dec 10 07:05:17 crc kubenswrapper[4765]: I1210 07:05:17.807916 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" event={"ID":"87a90f47-7e73-45ff-9f98-93bec3ebe12e","Type":"ContainerStarted","Data":"ea7cea7057353baf4b4cd7679686780dc57e5d074ee88838dcea6b11d0d44bf4"} Dec 10 07:05:17 crc kubenswrapper[4765]: I1210 07:05:17.948306 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-9jg4f" podStartSLOduration=34.377792592 podStartE2EDuration="1m1.948290104s" podCreationTimestamp="2025-12-10 07:04:16 +0000 UTC" firstStartedPulling="2025-12-10 07:04:18.683692748 +0000 UTC m=+978.410358064" lastFinishedPulling="2025-12-10 07:04:46.25419026 +0000 UTC m=+1005.980855576" observedRunningTime="2025-12-10 07:05:17.944727533 +0000 UTC m=+1037.671392869" watchObservedRunningTime="2025-12-10 07:05:17.948290104 +0000 UTC m=+1037.674955420" Dec 10 07:05:19 crc kubenswrapper[4765]: I1210 07:05:19.208560 4765 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sw27b" event={"ID":"23f1cedf-5038-44e0-93bb-415fa134eddf","Type":"ContainerStarted","Data":"446d552f0abc7b671a532ad4aacfcb3a22100c68a8d5a22f8ee5fca03349e4b7"} Dec 10 07:05:19 crc kubenswrapper[4765]: I1210 07:05:19.629353 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-694d6cfbd628ppx" Dec 10 07:05:20 crc kubenswrapper[4765]: I1210 07:05:20.224849 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" event={"ID":"29a0de10-d351-4d7c-9dfa-38e628ce116d","Type":"ContainerStarted","Data":"28dd0d05663f1b1d8069b2c4fb0def95faf9401ece82cebc339a3025097e1c6e"} Dec 10 07:05:20 crc kubenswrapper[4765]: I1210 07:05:20.234173 4765 generic.go:334] "Generic (PLEG): container finished" podID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerID="446d552f0abc7b671a532ad4aacfcb3a22100c68a8d5a22f8ee5fca03349e4b7" exitCode=0 Dec 10 07:05:20 crc kubenswrapper[4765]: I1210 07:05:20.234248 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sw27b" event={"ID":"23f1cedf-5038-44e0-93bb-415fa134eddf","Type":"ContainerDied","Data":"446d552f0abc7b671a532ad4aacfcb3a22100c68a8d5a22f8ee5fca03349e4b7"} Dec 10 07:05:20 crc kubenswrapper[4765]: I1210 07:05:20.252074 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-rg5bf" podStartSLOduration=5.228481439 podStartE2EDuration="1m3.252047429s" podCreationTimestamp="2025-12-10 07:04:17 +0000 UTC" firstStartedPulling="2025-12-10 07:04:20.323011012 +0000 UTC m=+980.049676328" lastFinishedPulling="2025-12-10 07:05:18.346577002 +0000 UTC m=+1038.073242318" observedRunningTime="2025-12-10 07:05:20.248344324 +0000 UTC m=+1039.975009660" watchObservedRunningTime="2025-12-10 07:05:20.252047429 +0000 UTC m=+1039.978712745" Dec 10 07:05:21 crc kubenswrapper[4765]: I1210 07:05:21.245760 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sw27b" event={"ID":"23f1cedf-5038-44e0-93bb-415fa134eddf","Type":"ContainerStarted","Data":"47baf5972d32505d8e0090e502d174900cc813d3f314de7937096d4d882e7649"} Dec 10 07:05:21 crc kubenswrapper[4765]: I1210 07:05:21.275386 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sw27b" podStartSLOduration=3.268855393 podStartE2EDuration="7.275359446s" podCreationTimestamp="2025-12-10 07:05:14 +0000 UTC" firstStartedPulling="2025-12-10 07:05:16.794181122 +0000 UTC m=+1036.520846438" lastFinishedPulling="2025-12-10 07:05:20.800685175 +0000 UTC m=+1040.527350491" observedRunningTime="2025-12-10 07:05:21.268757318 +0000 UTC m=+1040.995422644" watchObservedRunningTime="2025-12-10 07:05:21.275359446 +0000 UTC m=+1041.002024752" Dec 10 07:05:22 crc kubenswrapper[4765]: I1210 07:05:22.929961 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7f8r9" Dec 10 07:05:24 crc kubenswrapper[4765]: I1210 07:05:24.662666 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:24 crc kubenswrapper[4765]: I1210 07:05:24.663052 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:24 crc kubenswrapper[4765]: I1210 07:05:24.710579 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:25 crc kubenswrapper[4765]: I1210 07:05:25.325006 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:25 crc kubenswrapper[4765]: I1210 07:05:25.384325 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sw27b"] Dec 10 07:05:26 crc kubenswrapper[4765]: I1210 07:05:26.812921 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-zd5jq" Dec 10 07:05:26 crc kubenswrapper[4765]: I1210 07:05:26.939201 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-hbkmm" Dec 10 07:05:26 crc kubenswrapper[4765]: I1210 07:05:26.959699 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-sfbj7" Dec 10 07:05:27 crc kubenswrapper[4765]: I1210 07:05:27.091638 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-mjmkd" Dec 10 07:05:27 crc kubenswrapper[4765]: I1210 07:05:27.292414 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sw27b" podUID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerName="registry-server" containerID="cri-o://47baf5972d32505d8e0090e502d174900cc813d3f314de7937096d4d882e7649" gracePeriod=2 Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.307258 4765 generic.go:334] "Generic (PLEG): container finished" podID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerID="47baf5972d32505d8e0090e502d174900cc813d3f314de7937096d4d882e7649" exitCode=0 Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.307286 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sw27b" event={"ID":"23f1cedf-5038-44e0-93bb-415fa134eddf","Type":"ContainerDied","Data":"47baf5972d32505d8e0090e502d174900cc813d3f314de7937096d4d882e7649"} Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.683578 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.804043 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-catalog-content\") pod \"23f1cedf-5038-44e0-93bb-415fa134eddf\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.804169 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gr9q4\" (UniqueName: \"kubernetes.io/projected/23f1cedf-5038-44e0-93bb-415fa134eddf-kube-api-access-gr9q4\") pod \"23f1cedf-5038-44e0-93bb-415fa134eddf\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.804282 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-utilities\") pod \"23f1cedf-5038-44e0-93bb-415fa134eddf\" (UID: \"23f1cedf-5038-44e0-93bb-415fa134eddf\") " Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.805172 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-utilities" (OuterVolumeSpecName: "utilities") pod "23f1cedf-5038-44e0-93bb-415fa134eddf" (UID: "23f1cedf-5038-44e0-93bb-415fa134eddf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.811181 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23f1cedf-5038-44e0-93bb-415fa134eddf-kube-api-access-gr9q4" (OuterVolumeSpecName: "kube-api-access-gr9q4") pod "23f1cedf-5038-44e0-93bb-415fa134eddf" (UID: "23f1cedf-5038-44e0-93bb-415fa134eddf"). InnerVolumeSpecName "kube-api-access-gr9q4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.822382 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23f1cedf-5038-44e0-93bb-415fa134eddf" (UID: "23f1cedf-5038-44e0-93bb-415fa134eddf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.906263 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.906306 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gr9q4\" (UniqueName: \"kubernetes.io/projected/23f1cedf-5038-44e0-93bb-415fa134eddf-kube-api-access-gr9q4\") on node \"crc\" DevicePath \"\"" Dec 10 07:05:29 crc kubenswrapper[4765]: I1210 07:05:29.906322 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23f1cedf-5038-44e0-93bb-415fa134eddf-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:05:30 crc kubenswrapper[4765]: I1210 07:05:30.316526 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sw27b" event={"ID":"23f1cedf-5038-44e0-93bb-415fa134eddf","Type":"ContainerDied","Data":"d31ec55358cc681e01e958e1547ddd23916d8f66fde149fc208f877702f393ef"} Dec 10 07:05:30 crc kubenswrapper[4765]: I1210 07:05:30.317192 4765 scope.go:117] "RemoveContainer" containerID="47baf5972d32505d8e0090e502d174900cc813d3f314de7937096d4d882e7649" Dec 10 07:05:30 crc kubenswrapper[4765]: I1210 07:05:30.316593 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sw27b" Dec 10 07:05:30 crc kubenswrapper[4765]: I1210 07:05:30.334866 4765 scope.go:117] "RemoveContainer" containerID="446d552f0abc7b671a532ad4aacfcb3a22100c68a8d5a22f8ee5fca03349e4b7" Dec 10 07:05:30 crc kubenswrapper[4765]: I1210 07:05:30.352759 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sw27b"] Dec 10 07:05:30 crc kubenswrapper[4765]: I1210 07:05:30.359818 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sw27b"] Dec 10 07:05:30 crc kubenswrapper[4765]: I1210 07:05:30.375664 4765 scope.go:117] "RemoveContainer" containerID="33df6f0af2ce527343d877c99fdad04d3ff0b5f85fee7c753b17aafaf0b7ba79" Dec 10 07:05:30 crc kubenswrapper[4765]: I1210 07:05:30.597359 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23f1cedf-5038-44e0-93bb-415fa134eddf" path="/var/lib/kubelet/pods/23f1cedf-5038-44e0-93bb-415fa134eddf/volumes" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.864214 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gmwzv"] Dec 10 07:05:41 crc kubenswrapper[4765]: E1210 07:05:41.865021 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerName="extract-utilities" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.865033 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerName="extract-utilities" Dec 10 07:05:41 crc kubenswrapper[4765]: E1210 07:05:41.865047 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerName="extract-content" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.865053 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerName="extract-content" Dec 10 07:05:41 crc kubenswrapper[4765]: E1210 07:05:41.865060 4765 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerName="registry-server" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.865067 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerName="registry-server" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.865241 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="23f1cedf-5038-44e0-93bb-415fa134eddf" containerName="registry-server" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.866057 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.874796 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.874818 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.875348 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.886009 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gmwzv"] Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.887976 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-b29sr" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.967659 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pbcwp"] Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.969228 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.969285 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxv44\" (UniqueName: \"kubernetes.io/projected/b3cf4274-779d-4ceb-b45c-69f500042012-kube-api-access-jxv44\") pod \"dnsmasq-dns-84bb9d8bd9-gmwzv\" (UID: \"b3cf4274-779d-4ceb-b45c-69f500042012\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.969401 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cf4274-779d-4ceb-b45c-69f500042012-config\") pod \"dnsmasq-dns-84bb9d8bd9-gmwzv\" (UID: \"b3cf4274-779d-4ceb-b45c-69f500042012\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.971681 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 10 07:05:41 crc kubenswrapper[4765]: I1210 07:05:41.985161 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pbcwp"] Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.071069 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxv44\" (UniqueName: \"kubernetes.io/projected/b3cf4274-779d-4ceb-b45c-69f500042012-kube-api-access-jxv44\") pod \"dnsmasq-dns-84bb9d8bd9-gmwzv\" (UID: \"b3cf4274-779d-4ceb-b45c-69f500042012\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.071142 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-config\") pod \"dnsmasq-dns-5f854695bc-pbcwp\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.071194 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-dns-svc\") pod \"dnsmasq-dns-5f854695bc-pbcwp\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.071219 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cf4274-779d-4ceb-b45c-69f500042012-config\") pod \"dnsmasq-dns-84bb9d8bd9-gmwzv\" (UID: \"b3cf4274-779d-4ceb-b45c-69f500042012\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.071247 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvd7c\" (UniqueName: \"kubernetes.io/projected/76139240-5b05-4702-a654-4c7905d8f031-kube-api-access-vvd7c\") pod \"dnsmasq-dns-5f854695bc-pbcwp\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.072289 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cf4274-779d-4ceb-b45c-69f500042012-config\") pod \"dnsmasq-dns-84bb9d8bd9-gmwzv\" (UID: \"b3cf4274-779d-4ceb-b45c-69f500042012\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" 
Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.098491 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxv44\" (UniqueName: \"kubernetes.io/projected/b3cf4274-779d-4ceb-b45c-69f500042012-kube-api-access-jxv44\") pod \"dnsmasq-dns-84bb9d8bd9-gmwzv\" (UID: \"b3cf4274-779d-4ceb-b45c-69f500042012\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.172368 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-config\") pod \"dnsmasq-dns-5f854695bc-pbcwp\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.172456 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-dns-svc\") pod \"dnsmasq-dns-5f854695bc-pbcwp\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.172506 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvd7c\" (UniqueName: \"kubernetes.io/projected/76139240-5b05-4702-a654-4c7905d8f031-kube-api-access-vvd7c\") pod \"dnsmasq-dns-5f854695bc-pbcwp\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.173373 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-config\") pod \"dnsmasq-dns-5f854695bc-pbcwp\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.173639 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-dns-svc\") pod \"dnsmasq-dns-5f854695bc-pbcwp\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.188277 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvd7c\" (UniqueName: \"kubernetes.io/projected/76139240-5b05-4702-a654-4c7905d8f031-kube-api-access-vvd7c\") pod \"dnsmasq-dns-5f854695bc-pbcwp\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.191190 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.288607 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-pbcwp"
Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.634704 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gmwzv"]
Dec 10 07:05:42 crc kubenswrapper[4765]: I1210 07:05:42.756682 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pbcwp"]
Dec 10 07:05:42 crc kubenswrapper[4765]: W1210 07:05:42.758268 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76139240_5b05_4702_a654_4c7905d8f031.slice/crio-fdf8332f26a6ef39bb941b639ad19aceca76c0fa2832a17edda7d90237356b70 WatchSource:0}: Error finding container fdf8332f26a6ef39bb941b639ad19aceca76c0fa2832a17edda7d90237356b70: Status 404 returned error can't find the container with id fdf8332f26a6ef39bb941b639ad19aceca76c0fa2832a17edda7d90237356b70
Dec 10 07:05:43 crc kubenswrapper[4765]: I1210 07:05:43.411022 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" event={"ID":"b3cf4274-779d-4ceb-b45c-69f500042012","Type":"ContainerStarted","Data":"fdc6e58b07ba002e3017757b32b54498580f0b4edf2c2e13b41b65d208517c97"}
Dec 10 07:05:43 crc kubenswrapper[4765]: I1210 07:05:43.412132 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" event={"ID":"76139240-5b05-4702-a654-4c7905d8f031","Type":"ContainerStarted","Data":"fdf8332f26a6ef39bb941b639ad19aceca76c0fa2832a17edda7d90237356b70"}
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.309124 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pbcwp"]
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.332213 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c7cbb8f79-hr4vm"]
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.333747 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.348777 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c7cbb8f79-hr4vm"]
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.411898 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-config\") pod \"dnsmasq-dns-c7cbb8f79-hr4vm\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.411942 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-dns-svc\") pod \"dnsmasq-dns-c7cbb8f79-hr4vm\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.412067 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmcp6\" (UniqueName: \"kubernetes.io/projected/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-kube-api-access-dmcp6\") pod \"dnsmasq-dns-c7cbb8f79-hr4vm\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.513361 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-config\") pod \"dnsmasq-dns-c7cbb8f79-hr4vm\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.513527 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-dns-svc\") pod \"dnsmasq-dns-c7cbb8f79-hr4vm\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.513573 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmcp6\" (UniqueName: \"kubernetes.io/projected/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-kube-api-access-dmcp6\") pod \"dnsmasq-dns-c7cbb8f79-hr4vm\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.514494 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-config\") pod \"dnsmasq-dns-c7cbb8f79-hr4vm\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.514559 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-dns-svc\") pod \"dnsmasq-dns-c7cbb8f79-hr4vm\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.534171 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmcp6\" (UniqueName: \"kubernetes.io/projected/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-kube-api-access-dmcp6\") pod \"dnsmasq-dns-c7cbb8f79-hr4vm\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:44 crc kubenswrapper[4765]: I1210 07:05:44.673489 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.059622 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gmwzv"]
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.101404 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-545r6"]
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.102972 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.107277 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-545r6"]
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.229011 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-config\") pod \"dnsmasq-dns-95f5f6995-545r6\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.229401 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-dns-svc\") pod \"dnsmasq-dns-95f5f6995-545r6\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.229615 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lp2xr\" (UniqueName: \"kubernetes.io/projected/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-kube-api-access-lp2xr\") pod \"dnsmasq-dns-95f5f6995-545r6\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.274508 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c7cbb8f79-hr4vm"]
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.331424 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lp2xr\" (UniqueName: \"kubernetes.io/projected/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-kube-api-access-lp2xr\") pod \"dnsmasq-dns-95f5f6995-545r6\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.331486 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-config\") pod \"dnsmasq-dns-95f5f6995-545r6\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.331517 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-dns-svc\") pod \"dnsmasq-dns-95f5f6995-545r6\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.332461 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-dns-svc\") pod \"dnsmasq-dns-95f5f6995-545r6\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.332562 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-config\") pod \"dnsmasq-dns-95f5f6995-545r6\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.351821 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lp2xr\" (UniqueName: \"kubernetes.io/projected/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-kube-api-access-lp2xr\") pod \"dnsmasq-dns-95f5f6995-545r6\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.447393 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm" event={"ID":"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d","Type":"ContainerStarted","Data":"7a785e3f6a9625c8c37a00a0561ef3998bec0561bda0bbc81c92525f44801a0f"}
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.468898 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.501062 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.502361 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.507949 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.508036 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.508214 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.508325 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.508664 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.508874 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-h4mvg"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.509826 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.519095 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.659662 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.659752 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.659914 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.662601 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7d035962-836c-48cf-8ea4-a3e5a23f58f9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.662685 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.662718 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.662799 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfqtp\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-kube-api-access-jfqtp\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.662819 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.662838 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.662862 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.662903 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7d035962-836c-48cf-8ea4-a3e5a23f58f9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.764665 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.764933 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.764970 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfqtp\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-kube-api-access-jfqtp\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.764995 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.765020 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.765049 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.765074 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7d035962-836c-48cf-8ea4-a3e5a23f58f9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.765165 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.765251 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.765284 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.765380 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7d035962-836c-48cf-8ea4-a3e5a23f58f9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.765465 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.766421 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.767578 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.768186 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.769765 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.771527 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.783333 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.783939 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7d035962-836c-48cf-8ea4-a3e5a23f58f9-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.784180 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.784719 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7d035962-836c-48cf-8ea4-a3e5a23f58f9-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.791363 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfqtp\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-kube-api-access-jfqtp\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.794209 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:45 crc kubenswrapper[4765]: I1210 07:05:45.831224 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.069847 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-545r6"]
Dec 10 07:05:46 crc kubenswrapper[4765]: W1210 07:05:46.086973 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod011da8c4_a64f_48c5_89e8_f7f2c5f1cc78.slice/crio-d18ccbac710b416feb24716aeca9c35dcee0df6ef76b0daafb689220b6798b14 WatchSource:0}: Error finding container d18ccbac710b416feb24716aeca9c35dcee0df6ef76b0daafb689220b6798b14: Status 404 returned error can't find the container with id d18ccbac710b416feb24716aeca9c35dcee0df6ef76b0daafb689220b6798b14
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.239918 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.241593 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.246825 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-8k62n"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.247372 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.247511 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.247628 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.247731 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.247791 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.253271 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.257753 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379589 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379657 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379678 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379727 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379756 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2l6j\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-kube-api-access-j2l6j\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379773 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379798 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379822 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379837 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379875 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.379893 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.460391 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 10 07:05:46 crc kubenswrapper[4765]: W1210 07:05:46.463307 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d035962_836c_48cf_8ea4_a3e5a23f58f9.slice/crio-fa211ab29fdde41c1f086ce2d57513215a3c715d127716c2d3ad307419fdecce WatchSource:0}: Error finding container fa211ab29fdde41c1f086ce2d57513215a3c715d127716c2d3ad307419fdecce: Status 404 returned error can't find the container with id fa211ab29fdde41c1f086ce2d57513215a3c715d127716c2d3ad307419fdecce
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.465630 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-545r6" event={"ID":"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78","Type":"ContainerStarted","Data":"d18ccbac710b416feb24716aeca9c35dcee0df6ef76b0daafb689220b6798b14"}
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482562 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482700 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2l6j\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-kube-api-access-j2l6j\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482735 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482753 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482767 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482805 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482823 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482855 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482891 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482908 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.482927 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.483556 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.485258 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.485800 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.485926 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.486145 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.493986 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.497127 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.501828 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.502805 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.507720 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2l6j\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-kube-api-access-j2l6j\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.516117 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.527266 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") " pod="openstack/rabbitmq-server-0"
Dec 10 07:05:46 crc kubenswrapper[4765]: I1210 07:05:46.582352 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.150898 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.475508 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7d035962-836c-48cf-8ea4-a3e5a23f58f9","Type":"ContainerStarted","Data":"fa211ab29fdde41c1f086ce2d57513215a3c715d127716c2d3ad307419fdecce"}
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.477641 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"78b416b3-3796-4fa3-8a4f-7fa6107d98a1","Type":"ContainerStarted","Data":"598c1f7141a2b8fd523fccafc577be90babd1257b3415246bc8317d5bd5122df"}
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.531150 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.535445 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.539966 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.543539 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.543576 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-fvvl7"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.543789 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.554164 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.572444 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.662837 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-generated\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.662892 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.662920 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.662978 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-kolla-config\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.663047 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csxp5\" (UniqueName: \"kubernetes.io/projected/949ef1d3-9f74-4052-a482-9fea4e48d374-kube-api-access-csxp5\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.663195 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-operator-scripts\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.663260 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-default\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.663496 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.765164 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-operator-scripts\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.765553 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-default\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.765593 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.765677 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-generated\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.765711 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.765731 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.765748 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-kolla-config\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.765784 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csxp5\" (UniqueName: \"kubernetes.io/projected/949ef1d3-9f74-4052-a482-9fea4e48d374-kube-api-access-csxp5\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.766353 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.766883 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-operator-scripts\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.766924 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-default\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.767418 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-kolla-config\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.767455 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-generated\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.774805 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.792507 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.796187 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csxp5\" (UniqueName: \"kubernetes.io/projected/949ef1d3-9f74-4052-a482-9fea4e48d374-kube-api-access-csxp5\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.796615 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " pod="openstack/openstack-galera-0"
Dec 10 07:05:47 crc kubenswrapper[4765]: I1210 07:05:47.888280 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 10 07:05:48 crc kubenswrapper[4765]: I1210 07:05:48.827699 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 10 07:05:48 crc kubenswrapper[4765]: W1210 07:05:48.863104 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod949ef1d3_9f74_4052_a482_9fea4e48d374.slice/crio-4ddd196c5d5d82339f70f5aa6db6d91db526f82233b82f3a037a74712e69cbb4 WatchSource:0}: Error finding container 4ddd196c5d5d82339f70f5aa6db6d91db526f82233b82f3a037a74712e69cbb4: Status 404 returned error can't find the container with id 4ddd196c5d5d82339f70f5aa6db6d91db526f82233b82f3a037a74712e69cbb4
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.289221 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.292718 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.297143 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.297236 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.305571 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.305786 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-vjphj"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.336239 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.348270 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.349264 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.363126 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-t88cn"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.363572 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.363704 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.432502 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.465201 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.465310 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.465372 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.465424 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.465465 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.465513 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.465538 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nkww\" (UniqueName: \"kubernetes.io/projected/27b317a6-1f99-4951-a064-e8ca8a38dc94-kube-api-access-4nkww\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.465594 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.547376 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"949ef1d3-9f74-4052-a482-9fea4e48d374","Type":"ContainerStarted","Data":"4ddd196c5d5d82339f70f5aa6db6d91db526f82233b82f3a037a74712e69cbb4"}
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.566827 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-kolla-config\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.566916 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.566949 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.566985 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.567015 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.567047 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.567075 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.567159 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-config-data\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.567187 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nkww\" (UniqueName: \"kubernetes.io/projected/27b317a6-1f99-4951-a064-e8ca8a38dc94-kube-api-access-4nkww\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.567227 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgmdz\" (UniqueName: \"kubernetes.io/projected/655c5f62-880e-4e05-9db8-da19844facdf-kube-api-access-bgmdz\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.567255 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.567293 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.567344 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.579236 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.580578 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.580823 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.580900 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.582227 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.595918 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.599870 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.620483 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.621017 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nkww\" (UniqueName: \"kubernetes.io/projected/27b317a6-1f99-4951-a064-e8ca8a38dc94-kube-api-access-4nkww\") pod \"openstack-cell1-galera-0\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " pod="openstack/openstack-cell1-galera-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.668690 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.668762 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-config-data\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.668795 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgmdz\" (UniqueName: \"kubernetes.io/projected/655c5f62-880e-4e05-9db8-da19844facdf-kube-api-access-bgmdz\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.668838 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0"
Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.668907 4765 reconciler_common.go:218] "operationExecutor.MountVolume started
for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-kolla-config\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0" Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.673019 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.685702 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0" Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.692726 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0" Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.697909 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-config-data\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0" Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.698466 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-kolla-config\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0" Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.714702 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgmdz\" (UniqueName: \"kubernetes.io/projected/655c5f62-880e-4e05-9db8-da19844facdf-kube-api-access-bgmdz\") pod \"memcached-0\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " pod="openstack/memcached-0" Dec 10 07:05:49 crc kubenswrapper[4765]: I1210 07:05:49.727002 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 10 07:05:50 crc kubenswrapper[4765]: I1210 07:05:50.212540 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 10 07:05:50 crc kubenswrapper[4765]: I1210 07:05:50.359930 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 07:05:50 crc kubenswrapper[4765]: I1210 07:05:50.564579 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"27b317a6-1f99-4951-a064-e8ca8a38dc94","Type":"ContainerStarted","Data":"5faea9cb8c82b1ef0e80b974615ade74d087297cfe0e8e6ccff9010d9f9f35ac"} Dec 10 07:05:50 crc kubenswrapper[4765]: I1210 07:05:50.566975 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"655c5f62-880e-4e05-9db8-da19844facdf","Type":"ContainerStarted","Data":"5ef7aedd74056f10d17cbb5e760a66ab97a01ecc4c8d1a7ab568a61774158e45"} Dec 10 07:05:50 crc kubenswrapper[4765]: I1210 07:05:50.950178 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 07:05:50 crc kubenswrapper[4765]: I1210 07:05:50.951558 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 07:05:50 crc kubenswrapper[4765]: I1210 07:05:50.955261 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-gn78v" Dec 10 07:05:50 crc kubenswrapper[4765]: I1210 07:05:50.962249 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 07:05:50 crc kubenswrapper[4765]: I1210 07:05:50.998535 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn92c\" (UniqueName: \"kubernetes.io/projected/e773fc35-349d-4256-b0e9-843aaa6dd6c3-kube-api-access-nn92c\") pod \"kube-state-metrics-0\" (UID: \"e773fc35-349d-4256-b0e9-843aaa6dd6c3\") " pod="openstack/kube-state-metrics-0" Dec 10 07:05:51 crc kubenswrapper[4765]: I1210 07:05:51.102497 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn92c\" (UniqueName: \"kubernetes.io/projected/e773fc35-349d-4256-b0e9-843aaa6dd6c3-kube-api-access-nn92c\") pod \"kube-state-metrics-0\" (UID: \"e773fc35-349d-4256-b0e9-843aaa6dd6c3\") " pod="openstack/kube-state-metrics-0" Dec 10 07:05:51 crc kubenswrapper[4765]: I1210 07:05:51.133217 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn92c\" (UniqueName: \"kubernetes.io/projected/e773fc35-349d-4256-b0e9-843aaa6dd6c3-kube-api-access-nn92c\") pod \"kube-state-metrics-0\" (UID: \"e773fc35-349d-4256-b0e9-843aaa6dd6c3\") " pod="openstack/kube-state-metrics-0" Dec 10 07:05:51 crc kubenswrapper[4765]: I1210 07:05:51.286821 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 07:05:55 crc kubenswrapper[4765]: I1210 07:05:55.849392 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 10 07:05:55 crc kubenswrapper[4765]: I1210 07:05:55.855129 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:55 crc kubenswrapper[4765]: I1210 07:05:55.859524 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 10 07:05:55 crc kubenswrapper[4765]: I1210 07:05:55.859820 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-2kzcx" Dec 10 07:05:55 crc kubenswrapper[4765]: I1210 07:05:55.860004 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 10 07:05:55 crc kubenswrapper[4765]: I1210 07:05:55.861741 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 10 07:05:55 crc kubenswrapper[4765]: I1210 07:05:55.862042 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 10 07:05:55 crc kubenswrapper[4765]: I1210 07:05:55.877437 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.022634 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.022710 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.022745 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.022768 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-config\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.022785 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.022831 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkrzg\" (UniqueName: \"kubernetes.io/projected/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-kube-api-access-wkrzg\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.022952 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.024137 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.125255 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkrzg\" (UniqueName: \"kubernetes.io/projected/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-kube-api-access-wkrzg\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.125627 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.125667 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.125722 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.125780 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.125819 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.125842 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-config\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.125860 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 
07:05:56.126270 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.132825 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.135600 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.136996 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.143255 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.148520 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-config\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.149704 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.152416 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkrzg\" (UniqueName: \"kubernetes.io/projected/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-kube-api-access-wkrzg\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.177338 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.188632 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.562301 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hxr5k"] Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.563900 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.571875 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.572268 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.575526 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-p4mws" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.624783 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-v6h5d"] Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.626710 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxr5k"] Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.626747 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-v6h5d"] Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.626846 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733575 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-combined-ca-bundle\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733664 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run-ovn\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733695 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-ovn-controller-tls-certs\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733747 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-etc-ovs\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733767 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6tc9\" (UniqueName: \"kubernetes.io/projected/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-kube-api-access-r6tc9\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 
10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733790 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-log\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733809 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-log-ovn\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733827 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqlnr\" (UniqueName: \"kubernetes.io/projected/209844a2-e0ac-447f-99f6-28cd864ca648-kube-api-access-jqlnr\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733848 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-scripts\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733876 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-lib\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733899 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-run\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733913 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/209844a2-e0ac-447f-99f6-28cd864ca648-scripts\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.733936 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839464 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-combined-ca-bundle\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839513 4765 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run-ovn\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839545 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-ovn-controller-tls-certs\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839581 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-etc-ovs\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839602 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6tc9\" (UniqueName: \"kubernetes.io/projected/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-kube-api-access-r6tc9\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839625 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-log\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839645 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-log-ovn\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839669 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqlnr\" (UniqueName: \"kubernetes.io/projected/209844a2-e0ac-447f-99f6-28cd864ca648-kube-api-access-jqlnr\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839689 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-scripts\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839713 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-lib\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839734 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-run\") pod 
\"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839748 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/209844a2-e0ac-447f-99f6-28cd864ca648-scripts\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.839769 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.840541 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.841586 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-log-ovn\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.841712 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run-ovn\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.844783 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-combined-ca-bundle\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.844817 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-run\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.844801 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-etc-ovs\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.844937 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-log\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.844921 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: 
\"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-lib\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.845323 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-ovn-controller-tls-certs\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.846189 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-scripts\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.847271 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/209844a2-e0ac-447f-99f6-28cd864ca648-scripts\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.861432 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6tc9\" (UniqueName: \"kubernetes.io/projected/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-kube-api-access-r6tc9\") pod \"ovn-controller-hxr5k\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.886632 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqlnr\" (UniqueName: \"kubernetes.io/projected/209844a2-e0ac-447f-99f6-28cd864ca648-kube-api-access-jqlnr\") pod \"ovn-controller-ovs-v6h5d\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.897463 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxr5k" Dec 10 07:05:56 crc kubenswrapper[4765]: I1210 07:05:56.999263 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.492068 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.499594 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.502277 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.506710 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.507034 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-dpjn4" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.507235 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.520363 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.671804 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.671917 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.671995 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.672017 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-config\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.672049 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.672108 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.672128 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " 
pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.672163 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrqzv\" (UniqueName: \"kubernetes.io/projected/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-kube-api-access-wrqzv\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.773633 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.773721 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.773752 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.773799 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrqzv\" (UniqueName: \"kubernetes.io/projected/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-kube-api-access-wrqzv\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.773866 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.773895 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.773942 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.773961 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-config\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.774619 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.774910 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-config\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.775132 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.775956 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.779339 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.779900 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.780220 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.792430 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrqzv\" (UniqueName: \"kubernetes.io/projected/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-kube-api-access-wrqzv\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.804843 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " pod="openstack/ovsdbserver-sb-0" Dec 10 07:05:58 crc kubenswrapper[4765]: I1210 07:05:58.818507 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 10 07:06:18 crc kubenswrapper[4765]: E1210 07:06:18.478621 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13" Dec 10 07:06:18 crc kubenswrapper[4765]: E1210 07:06:18.479555 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-csxp5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(949ef1d3-9f74-4052-a482-9fea4e48d374): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:06:18 crc kubenswrapper[4765]: E1210 07:06:18.480766 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="949ef1d3-9f74-4052-a482-9fea4e48d374" Dec 10 07:06:18 crc kubenswrapper[4765]: E1210 07:06:18.929746 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13\\\"\"" pod="openstack/openstack-galera-0" 
podUID="949ef1d3-9f74-4052-a482-9fea4e48d374" Dec 10 07:06:19 crc kubenswrapper[4765]: E1210 07:06:19.984268 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d" Dec 10 07:06:19 crc kubenswrapper[4765]: E1210 07:06:19.984892 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jfqtp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(7d035962-836c-48cf-8ea4-a3e5a23f58f9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:06:19 crc kubenswrapper[4765]: E1210 07:06:19.986464 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: 
\"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" Dec 10 07:06:20 crc kubenswrapper[4765]: E1210 07:06:20.092839 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d" Dec 10 07:06:20 crc kubenswrapper[4765]: E1210 07:06:20.093054 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j2l6j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(78b416b3-3796-4fa3-8a4f-7fa6107d98a1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:06:20 crc kubenswrapper[4765]: E1210 07:06:20.094658 4765 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" Dec 10 07:06:20 crc kubenswrapper[4765]: I1210 07:06:20.732554 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 10 07:06:20 crc kubenswrapper[4765]: E1210 07:06:20.946704 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d\\\"\"" pod="openstack/rabbitmq-server-0" podUID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" Dec 10 07:06:20 crc kubenswrapper[4765]: E1210 07:06:20.947365 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" Dec 10 07:06:24 crc kubenswrapper[4765]: I1210 07:06:24.974935 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e","Type":"ContainerStarted","Data":"b110cc915a19b85f1c5643901f9c9c0dc3bc02950a2a678f0ee46627af709b5b"} Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.796299 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.797055 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vvd7c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5f854695bc-pbcwp_openstack(76139240-5b05-4702-a654-4c7905d8f031): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.798660 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" podUID="76139240-5b05-4702-a654-4c7905d8f031" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.834941 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.835170 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jxv44,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-84bb9d8bd9-gmwzv_openstack(b3cf4274-779d-4ceb-b45c-69f500042012): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.836287 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" podUID="b3cf4274-779d-4ceb-b45c-69f500042012" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.867122 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.867425 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dmcp6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-c7cbb8f79-hr4vm_openstack(ec23ee9b-035f-41dd-9f54-1ff4596f2f4d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.870493 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm" podUID="ec23ee9b-035f-41dd-9f54-1ff4596f2f4d" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.873440 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.873555 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lp2xr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-95f5f6995-545r6_openstack(011da8c4-a64f-48c5-89e8-f7f2c5f1cc78): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:06:27 crc kubenswrapper[4765]: E1210 07:06:27.875043 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-95f5f6995-545r6" podUID="011da8c4-a64f-48c5-89e8-f7f2c5f1cc78" Dec 10 07:06:28 crc kubenswrapper[4765]: E1210 07:06:28.017621 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33\\\"\"" pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm" podUID="ec23ee9b-035f-41dd-9f54-1ff4596f2f4d" Dec 10 07:06:28 crc kubenswrapper[4765]: E1210 07:06:28.017962 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33\\\"\"" pod="openstack/dnsmasq-dns-95f5f6995-545r6" podUID="011da8c4-a64f-48c5-89e8-f7f2c5f1cc78" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.515016 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.534038 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-v6h5d"] Dec 10 07:06:28 crc kubenswrapper[4765]: W1210 07:06:28.554831 4765 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod209844a2_e0ac_447f_99f6_28cd864ca648.slice/crio-8c9e44c5075dd308c10c91908b1b3130726915aed6ef884b3965f357982cf285 WatchSource:0}: Error finding container 8c9e44c5075dd308c10c91908b1b3130726915aed6ef884b3965f357982cf285: Status 404 returned error can't find the container with id 8c9e44c5075dd308c10c91908b1b3130726915aed6ef884b3965f357982cf285 Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.675219 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.696737 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-config\") pod \"76139240-5b05-4702-a654-4c7905d8f031\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.696804 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-dns-svc\") pod \"76139240-5b05-4702-a654-4c7905d8f031\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.696842 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvd7c\" (UniqueName: \"kubernetes.io/projected/76139240-5b05-4702-a654-4c7905d8f031-kube-api-access-vvd7c\") pod \"76139240-5b05-4702-a654-4c7905d8f031\" (UID: \"76139240-5b05-4702-a654-4c7905d8f031\") " Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.697641 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76139240-5b05-4702-a654-4c7905d8f031" (UID: "76139240-5b05-4702-a654-4c7905d8f031"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.698208 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-config" (OuterVolumeSpecName: "config") pod "76139240-5b05-4702-a654-4c7905d8f031" (UID: "76139240-5b05-4702-a654-4c7905d8f031"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.698767 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.706288 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76139240-5b05-4702-a654-4c7905d8f031-kube-api-access-vvd7c" (OuterVolumeSpecName: "kube-api-access-vvd7c") pod "76139240-5b05-4702-a654-4c7905d8f031" (UID: "76139240-5b05-4702-a654-4c7905d8f031"). InnerVolumeSpecName "kube-api-access-vvd7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.724719 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.748260 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.800801 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxv44\" (UniqueName: \"kubernetes.io/projected/b3cf4274-779d-4ceb-b45c-69f500042012-kube-api-access-jxv44\") pod \"b3cf4274-779d-4ceb-b45c-69f500042012\" (UID: \"b3cf4274-779d-4ceb-b45c-69f500042012\") " Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.800875 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cf4274-779d-4ceb-b45c-69f500042012-config\") pod \"b3cf4274-779d-4ceb-b45c-69f500042012\" (UID: \"b3cf4274-779d-4ceb-b45c-69f500042012\") " Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.801328 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76139240-5b05-4702-a654-4c7905d8f031-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.801342 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvd7c\" (UniqueName: \"kubernetes.io/projected/76139240-5b05-4702-a654-4c7905d8f031-kube-api-access-vvd7c\") on node \"crc\" DevicePath \"\"" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.801814 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3cf4274-779d-4ceb-b45c-69f500042012-config" (OuterVolumeSpecName: "config") pod "b3cf4274-779d-4ceb-b45c-69f500042012" (UID: "b3cf4274-779d-4ceb-b45c-69f500042012"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.804638 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3cf4274-779d-4ceb-b45c-69f500042012-kube-api-access-jxv44" (OuterVolumeSpecName: "kube-api-access-jxv44") pod "b3cf4274-779d-4ceb-b45c-69f500042012" (UID: "b3cf4274-779d-4ceb-b45c-69f500042012"). InnerVolumeSpecName "kube-api-access-jxv44". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.870458 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-hhk4h"] Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.871460 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.877973 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.886328 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-hhk4h"] Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.903274 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovn-rundir\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.903335 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovs-rundir\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.903786 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/416ec9f7-82f7-4eb1-a936-51038c6da878-config\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.903831 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.903856 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfxdr\" (UniqueName: \"kubernetes.io/projected/416ec9f7-82f7-4eb1-a936-51038c6da878-kube-api-access-sfxdr\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.903904 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-combined-ca-bundle\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.903979 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxv44\" (UniqueName: \"kubernetes.io/projected/b3cf4274-779d-4ceb-b45c-69f500042012-kube-api-access-jxv44\") on node \"crc\" DevicePath \"\"" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.903997 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cf4274-779d-4ceb-b45c-69f500042012-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:06:28 crc kubenswrapper[4765]: I1210 07:06:28.915580 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxr5k"] Dec 
10 07:06:28 crc kubenswrapper[4765]: W1210 07:06:28.955707 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b2c99d6_f2e1_4c1c_8825_e8c62d00d133.slice/crio-685b0cffb1b0fbd1627ca40e97cecc2329b1529f247aadd35a667c92ac186781 WatchSource:0}: Error finding container 685b0cffb1b0fbd1627ca40e97cecc2329b1529f247aadd35a667c92ac186781: Status 404 returned error can't find the container with id 685b0cffb1b0fbd1627ca40e97cecc2329b1529f247aadd35a667c92ac186781 Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.005136 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfxdr\" (UniqueName: \"kubernetes.io/projected/416ec9f7-82f7-4eb1-a936-51038c6da878-kube-api-access-sfxdr\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.005228 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-combined-ca-bundle\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.005295 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovn-rundir\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.005342 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovs-rundir\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.005410 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/416ec9f7-82f7-4eb1-a936-51038c6da878-config\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.005448 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.005859 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovs-rundir\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.005872 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovn-rundir\") pod \"ovn-controller-metrics-hhk4h\" (UID: 
\"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.006217 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/416ec9f7-82f7-4eb1-a936-51038c6da878-config\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.011189 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-combined-ca-bundle\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.014804 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.040116 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfxdr\" (UniqueName: \"kubernetes.io/projected/416ec9f7-82f7-4eb1-a936-51038c6da878-kube-api-access-sfxdr\") pod \"ovn-controller-metrics-hhk4h\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.049569 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c7cbb8f79-hr4vm"] Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.068600 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"655c5f62-880e-4e05-9db8-da19844facdf","Type":"ContainerStarted","Data":"b203f59bc5f85514c464b901df4f284e9f82d4b70b43ff25aa23de50dd64c527"} Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.069615 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.075796 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x"] Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.083831 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.084254 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e773fc35-349d-4256-b0e9-843aaa6dd6c3","Type":"ContainerStarted","Data":"09c5a27b8a3ade82dd5948c7c41fda14e9b878ac7519b79f94446e371a8a4955"} Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.087681 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.092994 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x"] Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.096672 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" event={"ID":"76139240-5b05-4702-a654-4c7905d8f031","Type":"ContainerDied","Data":"fdf8332f26a6ef39bb941b639ad19aceca76c0fa2832a17edda7d90237356b70"} Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.096810 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-pbcwp" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.106820 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-dns-svc\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.106972 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-ovsdbserver-nb\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.106999 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-config\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.107025 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsgf7\" (UniqueName: \"kubernetes.io/projected/4934a96a-3419-4623-9f64-58e8d9206864-kube-api-access-hsgf7\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.128834 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"fb882c24-ec9a-4e19-99ac-b6f96c420cb5","Type":"ContainerStarted","Data":"38895890daf952bdf1d411aa5eebdb9892db798a207cb43a4f205adfa0027de0"} Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.132912 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-v6h5d" event={"ID":"209844a2-e0ac-447f-99f6-28cd864ca648","Type":"ContainerStarted","Data":"8c9e44c5075dd308c10c91908b1b3130726915aed6ef884b3965f357982cf285"} Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.141413 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" event={"ID":"b3cf4274-779d-4ceb-b45c-69f500042012","Type":"ContainerDied","Data":"fdc6e58b07ba002e3017757b32b54498580f0b4edf2c2e13b41b65d208517c97"} Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.141555 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-gmwzv" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.143246 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k" event={"ID":"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133","Type":"ContainerStarted","Data":"685b0cffb1b0fbd1627ca40e97cecc2329b1529f247aadd35a667c92ac186781"} Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.145598 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"27b317a6-1f99-4951-a064-e8ca8a38dc94","Type":"ContainerStarted","Data":"121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0"} Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.241997 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-dns-svc\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.242105 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-ovsdbserver-nb\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.242139 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-config\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.242167 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsgf7\" (UniqueName: \"kubernetes.io/projected/4934a96a-3419-4623-9f64-58e8d9206864-kube-api-access-hsgf7\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.242563 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.243703 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-dns-svc\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.244273 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-ovsdbserver-nb\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.244756 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-config\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.255929 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.674296056 podStartE2EDuration="40.2559083s" podCreationTimestamp="2025-12-10 07:05:49 +0000 UTC" firstStartedPulling="2025-12-10 07:05:50.251212612 +0000 UTC m=+1069.977877928" lastFinishedPulling="2025-12-10 07:06:27.832824836 +0000 UTC m=+1107.559490172" observedRunningTime="2025-12-10 07:06:29.22636936 +0000 UTC m=+1108.953034686" watchObservedRunningTime="2025-12-10 07:06:29.2559083 +0000 UTC m=+1108.982573616" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.260319 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-545r6"] Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.284867 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsgf7\" (UniqueName: \"kubernetes.io/projected/4934a96a-3419-4623-9f64-58e8d9206864-kube-api-access-hsgf7\") pod \"dnsmasq-dns-7bbdc7ccd7-9qs4x\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.294447 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-hqhxd"] Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.295861 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.310445 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.378287 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-hqhxd"] Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.444762 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-config\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.445036 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-nb\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.445109 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-sb\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.445207 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv6tw\" (UniqueName: \"kubernetes.io/projected/ddcadac8-8742-4e99-9838-d62fced40efa-kube-api-access-qv6tw\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.445243 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-dns-svc\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.529185 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gmwzv"] Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.533111 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.537393 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gmwzv"] Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.547404 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-sb\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.547491 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv6tw\" (UniqueName: \"kubernetes.io/projected/ddcadac8-8742-4e99-9838-d62fced40efa-kube-api-access-qv6tw\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.547553 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-dns-svc\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.547597 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-config\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.547653 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-nb\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.548525 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-nb\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.549282 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-sb\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.551561 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-config\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.552357 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-dns-svc\") pod 
\"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.555020 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pbcwp"] Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.574314 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-pbcwp"] Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.576462 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv6tw\" (UniqueName: \"kubernetes.io/projected/ddcadac8-8742-4e99-9838-d62fced40efa-kube-api-access-qv6tw\") pod \"dnsmasq-dns-757dc6fff9-hqhxd\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.687066 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.746580 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.854202 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-config\") pod \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.854590 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmcp6\" (UniqueName: \"kubernetes.io/projected/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-kube-api-access-dmcp6\") pod \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.854623 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-dns-svc\") pod \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\" (UID: \"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d\") " Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.855791 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ec23ee9b-035f-41dd-9f54-1ff4596f2f4d" (UID: "ec23ee9b-035f-41dd-9f54-1ff4596f2f4d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.912355 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-kube-api-access-dmcp6" (OuterVolumeSpecName: "kube-api-access-dmcp6") pod "ec23ee9b-035f-41dd-9f54-1ff4596f2f4d" (UID: "ec23ee9b-035f-41dd-9f54-1ff4596f2f4d"). InnerVolumeSpecName "kube-api-access-dmcp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.936035 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-config" (OuterVolumeSpecName: "config") pod "ec23ee9b-035f-41dd-9f54-1ff4596f2f4d" (UID: "ec23ee9b-035f-41dd-9f54-1ff4596f2f4d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.957983 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.958013 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmcp6\" (UniqueName: \"kubernetes.io/projected/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-kube-api-access-dmcp6\") on node \"crc\" DevicePath \"\"" Dec 10 07:06:29 crc kubenswrapper[4765]: I1210 07:06:29.958023 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:06:30 crc kubenswrapper[4765]: I1210 07:06:30.057889 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-hhk4h"] Dec 10 07:06:30 crc kubenswrapper[4765]: I1210 07:06:30.157224 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm" Dec 10 07:06:30 crc kubenswrapper[4765]: I1210 07:06:30.157303 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c7cbb8f79-hr4vm" event={"ID":"ec23ee9b-035f-41dd-9f54-1ff4596f2f4d","Type":"ContainerDied","Data":"7a785e3f6a9625c8c37a00a0561ef3998bec0561bda0bbc81c92525f44801a0f"} Dec 10 07:06:30 crc kubenswrapper[4765]: I1210 07:06:30.221640 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c7cbb8f79-hr4vm"] Dec 10 07:06:30 crc kubenswrapper[4765]: I1210 07:06:30.238553 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c7cbb8f79-hr4vm"] Dec 10 07:06:30 crc kubenswrapper[4765]: I1210 07:06:30.248851 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x"] Dec 10 07:06:30 crc kubenswrapper[4765]: I1210 07:06:30.607846 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76139240-5b05-4702-a654-4c7905d8f031" path="/var/lib/kubelet/pods/76139240-5b05-4702-a654-4c7905d8f031/volumes" Dec 10 07:06:30 crc kubenswrapper[4765]: I1210 07:06:30.608362 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3cf4274-779d-4ceb-b45c-69f500042012" path="/var/lib/kubelet/pods/b3cf4274-779d-4ceb-b45c-69f500042012/volumes" Dec 10 07:06:30 crc kubenswrapper[4765]: I1210 07:06:30.608833 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec23ee9b-035f-41dd-9f54-1ff4596f2f4d" path="/var/lib/kubelet/pods/ec23ee9b-035f-41dd-9f54-1ff4596f2f4d/volumes" Dec 10 07:06:30 crc kubenswrapper[4765]: W1210 07:06:30.901383 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4934a96a_3419_4623_9f64_58e8d9206864.slice/crio-847eb1c62f57a1066ea20ea3f189e6e4f27b7612fbda1afc8dd680b853ca6769 WatchSource:0}: Error finding container 847eb1c62f57a1066ea20ea3f189e6e4f27b7612fbda1afc8dd680b853ca6769: Status 404 returned error can't find the container with id 847eb1c62f57a1066ea20ea3f189e6e4f27b7612fbda1afc8dd680b853ca6769 Dec 10 07:06:30 crc kubenswrapper[4765]: I1210 07:06:30.939300 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-545r6" Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.078967 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-config\") pod \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.079039 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lp2xr\" (UniqueName: \"kubernetes.io/projected/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-kube-api-access-lp2xr\") pod \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.079141 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-dns-svc\") pod \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\" (UID: \"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78\") " Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.079712 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-config" (OuterVolumeSpecName: "config") pod "011da8c4-a64f-48c5-89e8-f7f2c5f1cc78" (UID: "011da8c4-a64f-48c5-89e8-f7f2c5f1cc78"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.079738 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "011da8c4-a64f-48c5-89e8-f7f2c5f1cc78" (UID: "011da8c4-a64f-48c5-89e8-f7f2c5f1cc78"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.080226 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.080250 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.098208 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-kube-api-access-lp2xr" (OuterVolumeSpecName: "kube-api-access-lp2xr") pod "011da8c4-a64f-48c5-89e8-f7f2c5f1cc78" (UID: "011da8c4-a64f-48c5-89e8-f7f2c5f1cc78"). InnerVolumeSpecName "kube-api-access-lp2xr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.167193 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-545r6" event={"ID":"011da8c4-a64f-48c5-89e8-f7f2c5f1cc78","Type":"ContainerDied","Data":"d18ccbac710b416feb24716aeca9c35dcee0df6ef76b0daafb689220b6798b14"} Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.167292 4765 util.go:48] "No ready sandbox for pod can be found. 
Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.167292 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-545r6"
Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.169292 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-hhk4h" event={"ID":"416ec9f7-82f7-4eb1-a936-51038c6da878","Type":"ContainerStarted","Data":"0501da2016dc7178ad83bb608e005806715667bdc0baa51a63d1196d7ff8a2dd"}
Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.173815 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" event={"ID":"4934a96a-3419-4623-9f64-58e8d9206864","Type":"ContainerStarted","Data":"847eb1c62f57a1066ea20ea3f189e6e4f27b7612fbda1afc8dd680b853ca6769"}
Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.181227 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lp2xr\" (UniqueName: \"kubernetes.io/projected/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78-kube-api-access-lp2xr\") on node \"crc\" DevicePath \"\""
Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.236686 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-545r6"]
Dec 10 07:06:31 crc kubenswrapper[4765]: I1210 07:06:31.247287 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-545r6"]
Dec 10 07:06:32 crc kubenswrapper[4765]: I1210 07:06:32.599626 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="011da8c4-a64f-48c5-89e8-f7f2c5f1cc78" path="/var/lib/kubelet/pods/011da8c4-a64f-48c5-89e8-f7f2c5f1cc78/volumes"
Dec 10 07:06:33 crc kubenswrapper[4765]: I1210 07:06:33.190500 4765 generic.go:334] "Generic (PLEG): container finished" podID="27b317a6-1f99-4951-a064-e8ca8a38dc94" containerID="121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0" exitCode=0
Dec 10 07:06:33 crc kubenswrapper[4765]: I1210 07:06:33.190542 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"27b317a6-1f99-4951-a064-e8ca8a38dc94","Type":"ContainerDied","Data":"121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0"}
Dec 10 07:06:34 crc kubenswrapper[4765]: I1210 07:06:34.728398 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Dec 10 07:06:34 crc kubenswrapper[4765]: I1210 07:06:34.819422 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-hqhxd"]
Dec 10 07:06:38 crc kubenswrapper[4765]: I1210 07:06:38.230558 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" event={"ID":"ddcadac8-8742-4e99-9838-d62fced40efa","Type":"ContainerStarted","Data":"9212e62011c113e0190d89094c4450f805ddb1928809bc1c62fb3ebb9e10c9c0"}
Dec 10 07:06:39 crc kubenswrapper[4765]: I1210 07:06:39.240118 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e","Type":"ContainerStarted","Data":"8a962de55c82b0ec8429a3fe9ee218997a5f0a7b38bc54e87077ef10ffdd1b1e"}
Dec 10 07:06:39 crc kubenswrapper[4765]: I1210 07:06:39.241688 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" event={"ID":"4934a96a-3419-4623-9f64-58e8d9206864","Type":"ContainerStarted","Data":"a2c8bf082e2ca39c600927ef90c7c46793163400415d1bf326aea293998c0b6d"}
Dec 10 07:06:39 crc kubenswrapper[4765]: I1210 07:06:39.243440 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"949ef1d3-9f74-4052-a482-9fea4e48d374","Type":"ContainerStarted","Data":"b8ad925fd038c364369863dffec329b548cd977bfa87db7419f585e9b7676be4"}
event={"ID":"949ef1d3-9f74-4052-a482-9fea4e48d374","Type":"ContainerStarted","Data":"b8ad925fd038c364369863dffec329b548cd977bfa87db7419f585e9b7676be4"} Dec 10 07:06:39 crc kubenswrapper[4765]: I1210 07:06:39.245663 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"27b317a6-1f99-4951-a064-e8ca8a38dc94","Type":"ContainerStarted","Data":"2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250"} Dec 10 07:06:39 crc kubenswrapper[4765]: I1210 07:06:39.247031 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"fb882c24-ec9a-4e19-99ac-b6f96c420cb5","Type":"ContainerStarted","Data":"434c9efa9a14afdab28272a1b95c4a867523ae3d5290dad154a712563877f571"} Dec 10 07:06:39 crc kubenswrapper[4765]: I1210 07:06:39.307880 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=13.929904323 podStartE2EDuration="51.307860421s" podCreationTimestamp="2025-12-10 07:05:48 +0000 UTC" firstStartedPulling="2025-12-10 07:05:50.459404447 +0000 UTC m=+1070.186069763" lastFinishedPulling="2025-12-10 07:06:27.837360545 +0000 UTC m=+1107.564025861" observedRunningTime="2025-12-10 07:06:39.298951837 +0000 UTC m=+1119.025617153" watchObservedRunningTime="2025-12-10 07:06:39.307860421 +0000 UTC m=+1119.034525727" Dec 10 07:06:39 crc kubenswrapper[4765]: I1210 07:06:39.675324 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 10 07:06:39 crc kubenswrapper[4765]: I1210 07:06:39.675386 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.266051 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e773fc35-349d-4256-b0e9-843aaa6dd6c3","Type":"ContainerStarted","Data":"79a5550779e6fbfd015d5f1fa3770043ff09dff1bc1f5ff7329f6ba56fef9c03"} Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.269726 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e","Type":"ContainerStarted","Data":"4ea50907947c896cad4543aac319e9ed34423034ee1e36f452043b39462ea022"} Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.273747 4765 generic.go:334] "Generic (PLEG): container finished" podID="4934a96a-3419-4623-9f64-58e8d9206864" containerID="a2c8bf082e2ca39c600927ef90c7c46793163400415d1bf326aea293998c0b6d" exitCode=0 Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.273834 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" event={"ID":"4934a96a-3419-4623-9f64-58e8d9206864","Type":"ContainerDied","Data":"a2c8bf082e2ca39c600927ef90c7c46793163400415d1bf326aea293998c0b6d"} Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.278230 4765 generic.go:334] "Generic (PLEG): container finished" podID="ddcadac8-8742-4e99-9838-d62fced40efa" containerID="f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba" exitCode=0 Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.278312 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" event={"ID":"ddcadac8-8742-4e99-9838-d62fced40efa","Type":"ContainerDied","Data":"f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba"} Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.280984 
Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.280984 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-hhk4h" event={"ID":"416ec9f7-82f7-4eb1-a936-51038c6da878","Type":"ContainerStarted","Data":"c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5"}
Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.283893 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7d035962-836c-48cf-8ea4-a3e5a23f58f9","Type":"ContainerStarted","Data":"a10778f7f86ddbb0712d58a66f9e2ebd6b349454a29483ae7d2e89e3bf524a02"}
Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.297333 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=40.486927398 podStartE2EDuration="50.297306843s" podCreationTimestamp="2025-12-10 07:05:50 +0000 UTC" firstStartedPulling="2025-12-10 07:06:28.541682832 +0000 UTC m=+1108.268348148" lastFinishedPulling="2025-12-10 07:06:38.352062267 +0000 UTC m=+1118.078727593" observedRunningTime="2025-12-10 07:06:40.296641604 +0000 UTC m=+1120.023306920" watchObservedRunningTime="2025-12-10 07:06:40.297306843 +0000 UTC m=+1120.023972159"
Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.299242 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"fb882c24-ec9a-4e19-99ac-b6f96c420cb5","Type":"ContainerStarted","Data":"bc68bc51e690d35bd38c6ac72178fa005e623ece23b2db62f390bf0ff9151544"}
Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.312819 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-v6h5d" event={"ID":"209844a2-e0ac-447f-99f6-28cd864ca648","Type":"ContainerStarted","Data":"f133729f179c48ec9b76c236a6a3f8aeeacde532a47048ee4773738cc978f7d1"}
Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.318800 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k" event={"ID":"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133","Type":"ContainerStarted","Data":"f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f"}
Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.320254 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-hhk4h" podStartSLOduration=4.500387075 podStartE2EDuration="12.320227686s" podCreationTimestamp="2025-12-10 07:06:28 +0000 UTC" firstStartedPulling="2025-12-10 07:06:30.527580844 +0000 UTC m=+1110.254246160" lastFinishedPulling="2025-12-10 07:06:38.347421455 +0000 UTC m=+1118.074086771" observedRunningTime="2025-12-10 07:06:40.319288739 +0000 UTC m=+1120.045954055" watchObservedRunningTime="2025-12-10 07:06:40.320227686 +0000 UTC m=+1120.046893002"
Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.325708 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-hxr5k"
Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.417643 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=33.568207182 podStartE2EDuration="43.417623568s" podCreationTimestamp="2025-12-10 07:05:57 +0000 UTC" firstStartedPulling="2025-12-10 07:06:24.725822054 +0000 UTC m=+1104.452487370" lastFinishedPulling="2025-12-10 07:06:34.57523845 +0000 UTC m=+1114.301903756" observedRunningTime="2025-12-10 07:06:40.405482372 +0000 UTC m=+1120.132147678" watchObservedRunningTime="2025-12-10 07:06:40.417623568 +0000 UTC m=+1120.144288874"
Dec 10 07:06:40 crc kubenswrapper[4765]:
I1210 07:06:40.515706 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=40.694900677 podStartE2EDuration="46.515688769s" podCreationTimestamp="2025-12-10 07:05:54 +0000 UTC" firstStartedPulling="2025-12-10 07:06:28.754460098 +0000 UTC m=+1108.481125414" lastFinishedPulling="2025-12-10 07:06:34.57524819 +0000 UTC m=+1114.301913506" observedRunningTime="2025-12-10 07:06:40.506554349 +0000 UTC m=+1120.233219675" watchObservedRunningTime="2025-12-10 07:06:40.515688769 +0000 UTC m=+1120.242354085" Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.530811 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-hxr5k" podStartSLOduration=39.124255428 podStartE2EDuration="44.530797279s" podCreationTimestamp="2025-12-10 07:05:56 +0000 UTC" firstStartedPulling="2025-12-10 07:06:28.960794271 +0000 UTC m=+1108.687459587" lastFinishedPulling="2025-12-10 07:06:34.367336122 +0000 UTC m=+1114.094001438" observedRunningTime="2025-12-10 07:06:40.524696415 +0000 UTC m=+1120.251361741" watchObservedRunningTime="2025-12-10 07:06:40.530797279 +0000 UTC m=+1120.257462595" Dec 10 07:06:40 crc kubenswrapper[4765]: I1210 07:06:40.819578 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 10 07:06:41 crc kubenswrapper[4765]: I1210 07:06:41.189820 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 10 07:06:41 crc kubenswrapper[4765]: I1210 07:06:41.189871 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 10 07:06:41 crc kubenswrapper[4765]: I1210 07:06:41.287679 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 10 07:06:41 crc kubenswrapper[4765]: I1210 07:06:41.327576 4765 generic.go:334] "Generic (PLEG): container finished" podID="209844a2-e0ac-447f-99f6-28cd864ca648" containerID="f133729f179c48ec9b76c236a6a3f8aeeacde532a47048ee4773738cc978f7d1" exitCode=0 Dec 10 07:06:41 crc kubenswrapper[4765]: I1210 07:06:41.327648 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-v6h5d" event={"ID":"209844a2-e0ac-447f-99f6-28cd864ca648","Type":"ContainerDied","Data":"f133729f179c48ec9b76c236a6a3f8aeeacde532a47048ee4773738cc978f7d1"} Dec 10 07:06:41 crc kubenswrapper[4765]: I1210 07:06:41.328689 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"78b416b3-3796-4fa3-8a4f-7fa6107d98a1","Type":"ContainerStarted","Data":"fc75f40345fbe86be4d1614b512064bb219a33382f202793ae741275bbb66a36"} Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.125169 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x"] Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.185234 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-8dhgg"] Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.186688 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.187398 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-8dhgg"] Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.292675 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.292751 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2wdd\" (UniqueName: \"kubernetes.io/projected/e849d8f0-dbfc-4708-8c74-be5f6a594304-kube-api-access-s2wdd\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.292974 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.293233 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-dns-svc\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.293417 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-config\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.340905 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" event={"ID":"ddcadac8-8742-4e99-9838-d62fced40efa","Type":"ContainerStarted","Data":"3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742"} Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.341462 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.349801 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" event={"ID":"4934a96a-3419-4623-9f64-58e8d9206864","Type":"ContainerStarted","Data":"b5e74a15b203ef52f851095f1cc16190a0f4872664a016876134693584df8e67"} Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.349840 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.365740 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podStartSLOduration=13.365720585 podStartE2EDuration="13.365720585s" podCreationTimestamp="2025-12-10 07:06:29 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:06:42.361732492 +0000 UTC m=+1122.088397808" watchObservedRunningTime="2025-12-10 07:06:42.365720585 +0000 UTC m=+1122.092385901" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.382637 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podStartSLOduration=6.518936221 podStartE2EDuration="13.382612226s" podCreationTimestamp="2025-12-10 07:06:29 +0000 UTC" firstStartedPulling="2025-12-10 07:06:30.904512093 +0000 UTC m=+1110.631177409" lastFinishedPulling="2025-12-10 07:06:37.768188098 +0000 UTC m=+1117.494853414" observedRunningTime="2025-12-10 07:06:42.380494736 +0000 UTC m=+1122.107160062" watchObservedRunningTime="2025-12-10 07:06:42.382612226 +0000 UTC m=+1122.109277552" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.395333 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.395548 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-dns-svc\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.395588 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-config\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.395640 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.395923 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2wdd\" (UniqueName: \"kubernetes.io/projected/e849d8f0-dbfc-4708-8c74-be5f6a594304-kube-api-access-s2wdd\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.396405 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.397188 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " 
pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.397323 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-config\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.398196 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-dns-svc\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.418907 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2wdd\" (UniqueName: \"kubernetes.io/projected/e849d8f0-dbfc-4708-8c74-be5f6a594304-kube-api-access-s2wdd\") pod \"dnsmasq-dns-6cb545bd4c-8dhgg\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:42 crc kubenswrapper[4765]: I1210 07:06:42.527802 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.043931 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-8dhgg"] Dec 10 07:06:43 crc kubenswrapper[4765]: W1210 07:06:43.049551 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode849d8f0_dbfc_4708_8c74_be5f6a594304.slice/crio-de52b13cfa1f01d053f8072829326b2c4aa1ee4e5074440efa4f59bf8e342fbc WatchSource:0}: Error finding container de52b13cfa1f01d053f8072829326b2c4aa1ee4e5074440efa4f59bf8e342fbc: Status 404 returned error can't find the container with id de52b13cfa1f01d053f8072829326b2c4aa1ee4e5074440efa4f59bf8e342fbc Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.166767 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.209157 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.209626 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.214217 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.214273 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.214222 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.214461 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-6jr2m" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.317055 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-lock\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.317129 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.317240 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-cache\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.317261 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-298hl\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-kube-api-access-298hl\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.317285 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.357653 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" event={"ID":"e849d8f0-dbfc-4708-8c74-be5f6a594304","Type":"ContainerStarted","Data":"de52b13cfa1f01d053f8072829326b2c4aa1ee4e5074440efa4f59bf8e342fbc"} Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.357771 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" containerID="cri-o://b5e74a15b203ef52f851095f1cc16190a0f4872664a016876134693584df8e67" gracePeriod=10 Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.424021 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-cache\") pod \"swift-storage-0\" (UID: 
\"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.424577 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-cache\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.425809 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-298hl\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-kube-api-access-298hl\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.425894 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.425984 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-lock\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.426025 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: E1210 07:06:43.426209 4765 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 10 07:06:43 crc kubenswrapper[4765]: E1210 07:06:43.426226 4765 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 10 07:06:43 crc kubenswrapper[4765]: E1210 07:06:43.426274 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift podName:d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86 nodeName:}" failed. No retries permitted until 2025-12-10 07:06:43.92625617 +0000 UTC m=+1123.652921486 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift") pod "swift-storage-0" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86") : configmap "swift-ring-files" not found
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.426417 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-lock\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.426762 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/swift-storage-0"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.448574 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-298hl\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-kube-api-access-298hl\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.453203 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.694728 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-9mxvj"]
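The etc-swift mount above fails because the projected configmap swift-ring-files does not exist yet; it is produced by the swift-ring-rebalance job that is only now being scheduled. Each failure reschedules the operation with a doubled durationBeforeRetry: 500ms here, then 1s, 2s, 4s, 8s, 16s and 32s in the records that follow. A minimal sketch of that doubling schedule (the initial delay is taken from the log; the cap is illustrative, not kubelet's exact constant):

```go
// Minimal sketch of the exponential retry delays visible in the
// durationBeforeRetry values of the nestedpendingoperations records.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond // first retry after the first SetUp failure
	maxDelay := 2 * time.Minute     // assumed cap for this sketch
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d failed; no retries permitted for %v\n", attempt, delay)
		delay *= 2 // 500ms -> 1s -> 2s -> 4s -> 8s -> 16s -> 32s -> ...
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}
```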
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.696745 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9mxvj"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.698560 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.698779 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.699009 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.706384 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-9mxvj"]
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.731676 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-scripts\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.731794 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-swiftconf\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.731855 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-combined-ca-bundle\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.731881 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-dispersionconf\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.731912 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmzb9\" (UniqueName: \"kubernetes.io/projected/b145a008-b02d-4c5b-b426-5da68b2d2d09-kube-api-access-zmzb9\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.732063 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b145a008-b02d-4c5b-b426-5da68b2d2d09-etc-swift\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj"
Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.732126 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-ring-data-devices\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj"
Dec 10
07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.819574 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.834246 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-combined-ca-bundle\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.834302 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-dispersionconf\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.834354 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmzb9\" (UniqueName: \"kubernetes.io/projected/b145a008-b02d-4c5b-b426-5da68b2d2d09-kube-api-access-zmzb9\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.834394 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b145a008-b02d-4c5b-b426-5da68b2d2d09-etc-swift\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.834445 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-ring-data-devices\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.835056 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b145a008-b02d-4c5b-b426-5da68b2d2d09-etc-swift\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.835235 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-scripts\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.835385 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-ring-data-devices\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.836279 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-scripts\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " 
pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.836422 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-swiftconf\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.838304 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-dispersionconf\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.840492 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-combined-ca-bundle\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.840802 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-swiftconf\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.850684 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmzb9\" (UniqueName: \"kubernetes.io/projected/b145a008-b02d-4c5b-b426-5da68b2d2d09-kube-api-access-zmzb9\") pod \"swift-ring-rebalance-9mxvj\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:06:43 crc kubenswrapper[4765]: I1210 07:06:43.938339 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:43 crc kubenswrapper[4765]: E1210 07:06:43.938687 4765 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 10 07:06:43 crc kubenswrapper[4765]: E1210 07:06:43.938736 4765 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 10 07:06:43 crc kubenswrapper[4765]: E1210 07:06:43.938809 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift podName:d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86 nodeName:}" failed. No retries permitted until 2025-12-10 07:06:44.938787998 +0000 UTC m=+1124.665453364 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift") pod "swift-storage-0" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86") : configmap "swift-ring-files" not found Dec 10 07:06:44 crc kubenswrapper[4765]: I1210 07:06:44.366163 4765 generic.go:334] "Generic (PLEG): container finished" podID="949ef1d3-9f74-4052-a482-9fea4e48d374" containerID="b8ad925fd038c364369863dffec329b548cd977bfa87db7419f585e9b7676be4" exitCode=0 Dec 10 07:06:44 crc kubenswrapper[4765]: I1210 07:06:44.366215 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"949ef1d3-9f74-4052-a482-9fea4e48d374","Type":"ContainerDied","Data":"b8ad925fd038c364369863dffec329b548cd977bfa87db7419f585e9b7676be4"} Dec 10 07:06:44 crc kubenswrapper[4765]: I1210 07:06:44.749584 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:06:44 crc kubenswrapper[4765]: I1210 07:06:44.955548 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:44 crc kubenswrapper[4765]: E1210 07:06:44.955727 4765 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 10 07:06:44 crc kubenswrapper[4765]: E1210 07:06:44.955747 4765 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 10 07:06:44 crc kubenswrapper[4765]: E1210 07:06:44.955799 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift podName:d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86 nodeName:}" failed. No retries permitted until 2025-12-10 07:06:46.955783344 +0000 UTC m=+1126.682448660 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift") pod "swift-storage-0" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86") : configmap "swift-ring-files" not found Dec 10 07:06:45 crc kubenswrapper[4765]: I1210 07:06:45.795830 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 10 07:06:46 crc kubenswrapper[4765]: I1210 07:06:46.006110 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 10 07:06:46 crc kubenswrapper[4765]: I1210 07:06:46.992755 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:46 crc kubenswrapper[4765]: E1210 07:06:46.992988 4765 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 10 07:06:46 crc kubenswrapper[4765]: E1210 07:06:46.993153 4765 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 10 07:06:46 crc kubenswrapper[4765]: E1210 07:06:46.993218 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift podName:d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86 nodeName:}" failed. No retries permitted until 2025-12-10 07:06:50.993196124 +0000 UTC m=+1130.719861440 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift") pod "swift-storage-0" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86") : configmap "swift-ring-files" not found Dec 10 07:06:47 crc kubenswrapper[4765]: I1210 07:06:47.747911 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:06:49 crc kubenswrapper[4765]: I1210 07:06:49.537117 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:06:49 crc kubenswrapper[4765]: I1210 07:06:49.747715 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:06:50 crc kubenswrapper[4765]: I1210 07:06:50.748624 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:06:50 crc kubenswrapper[4765]: I1210 07:06:50.748937 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:06:50 crc kubenswrapper[4765]: I1210 07:06:50.749412 4765 prober.go:107] "Probe 
failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:06:50 crc kubenswrapper[4765]: I1210 07:06:50.750421 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="dnsmasq-dns" containerStatusID={"Type":"cri-o","ID":"3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742"} pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" containerMessage="Container dnsmasq-dns failed liveness probe, will be restarted" Dec 10 07:06:50 crc kubenswrapper[4765]: I1210 07:06:50.750473 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" containerID="cri-o://3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742" gracePeriod=10 Dec 10 07:06:51 crc kubenswrapper[4765]: I1210 07:06:51.057368 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:06:51 crc kubenswrapper[4765]: E1210 07:06:51.057630 4765 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 10 07:06:51 crc kubenswrapper[4765]: E1210 07:06:51.057649 4765 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 10 07:06:51 crc kubenswrapper[4765]: E1210 07:06:51.057707 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift podName:d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86 nodeName:}" failed. No retries permitted until 2025-12-10 07:06:59.057689638 +0000 UTC m=+1138.784354954 (durationBeforeRetry 8s). 
Dec 10 07:06:51 crc kubenswrapper[4765]: E1210 07:06:51.057707 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift podName:d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86 nodeName:}" failed. No retries permitted until 2025-12-10 07:06:59.057689638 +0000 UTC m=+1138.784354954 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift") pod "swift-storage-0" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86") : configmap "swift-ring-files" not found
Dec 10 07:06:51 crc kubenswrapper[4765]: I1210 07:06:51.291018 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 10 07:06:54 crc kubenswrapper[4765]: I1210 07:06:54.533883 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused"
Dec 10 07:06:54 crc kubenswrapper[4765]: I1210 07:06:54.748551 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused"
Dec 10 07:06:59 crc kubenswrapper[4765]: I1210 07:06:59.076271 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0"
Dec 10 07:06:59 crc kubenswrapper[4765]: E1210 07:06:59.076496 4765 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 10 07:06:59 crc kubenswrapper[4765]: E1210 07:06:59.076666 4765 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 10 07:06:59 crc kubenswrapper[4765]: E1210 07:06:59.076735 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift podName:d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86 nodeName:}" failed. No retries permitted until 2025-12-10 07:07:15.076713107 +0000 UTC m=+1154.803378433 (durationBeforeRetry 16s).
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift") pod "swift-storage-0" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86") : configmap "swift-ring-files" not found Dec 10 07:06:59 crc kubenswrapper[4765]: I1210 07:06:59.534284 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:06:59 crc kubenswrapper[4765]: I1210 07:06:59.748197 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:07:04 crc kubenswrapper[4765]: I1210 07:07:04.049855 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:07:04 crc kubenswrapper[4765]: I1210 07:07:04.050488 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:07:04 crc kubenswrapper[4765]: I1210 07:07:04.534714 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:07:04 crc kubenswrapper[4765]: I1210 07:07:04.748405 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:07:09 crc kubenswrapper[4765]: I1210 07:07:09.533844 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:07:09 crc kubenswrapper[4765]: I1210 07:07:09.748073 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:07:12 crc kubenswrapper[4765]: I1210 07:07:12.565952 4765 generic.go:334] "Generic (PLEG): container finished" podID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" containerID="a10778f7f86ddbb0712d58a66f9e2ebd6b349454a29483ae7d2e89e3bf524a02" exitCode=0 Dec 10 07:07:12 crc kubenswrapper[4765]: I1210 07:07:12.566030 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7d035962-836c-48cf-8ea4-a3e5a23f58f9","Type":"ContainerDied","Data":"a10778f7f86ddbb0712d58a66f9e2ebd6b349454a29483ae7d2e89e3bf524a02"} Dec 10 07:07:14 crc 
kubenswrapper[4765]: I1210 07:07:14.533633 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:07:14 crc kubenswrapper[4765]: I1210 07:07:14.748508 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:07:15 crc kubenswrapper[4765]: I1210 07:07:15.162182 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:07:15 crc kubenswrapper[4765]: E1210 07:07:15.162448 4765 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 10 07:07:15 crc kubenswrapper[4765]: E1210 07:07:15.162663 4765 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 10 07:07:15 crc kubenswrapper[4765]: E1210 07:07:15.162740 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift podName:d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86 nodeName:}" failed. No retries permitted until 2025-12-10 07:07:47.1627217 +0000 UTC m=+1186.889387016 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift") pod "swift-storage-0" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86") : configmap "swift-ring-files" not found Dec 10 07:07:16 crc kubenswrapper[4765]: I1210 07:07:16.775757 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" probeResult="failure" output="command timed out" Dec 10 07:07:16 crc kubenswrapper[4765]: I1210 07:07:16.776044 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" probeResult="failure" output="command timed out" Dec 10 07:07:19 crc kubenswrapper[4765]: I1210 07:07:19.534328 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:07:19 crc kubenswrapper[4765]: I1210 07:07:19.748249 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:07:21 crc kubenswrapper[4765]: I1210 07:07:21.773414 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" probeResult="failure" output="command timed out" Dec 10 07:07:21 crc kubenswrapper[4765]: I1210 
07:07:21.774994 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" probeResult="failure" output="command timed out"
Dec 10 07:07:24 crc kubenswrapper[4765]: I1210 07:07:24.534060 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused"
Dec 10 07:07:24 crc kubenswrapper[4765]: I1210 07:07:24.753374 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused"
Dec 10 07:07:26 crc kubenswrapper[4765]: I1210 07:07:26.771986 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" probeResult="failure" output="command timed out"
Dec 10 07:07:26 crc kubenswrapper[4765]: I1210 07:07:26.773729 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" probeResult="failure" output="command timed out"
Dec 10 07:07:26 crc kubenswrapper[4765]: I1210 07:07:26.773813 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/ovn-controller-hxr5k"
Dec 10 07:07:26 crc kubenswrapper[4765]: I1210 07:07:26.774507 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="ovn-controller" containerStatusID={"Type":"cri-o","ID":"f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f"} pod="openstack/ovn-controller-hxr5k" containerMessage="Container ovn-controller failed liveness probe, will be restarted"
Dec 10 07:07:29 crc kubenswrapper[4765]: I1210 07:07:29.535149 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused"
Dec 10 07:07:29 crc kubenswrapper[4765]: I1210 07:07:29.748704 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused"
Dec 10 07:07:30 crc kubenswrapper[4765]: I1210 07:07:30.861304 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" probeResult="failure" output=""
Dec 10 07:07:30 crc kubenswrapper[4765]: E1210 07:07:30.862429 4765 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=<
Dec 10 07:07:30 crc kubenswrapper[4765]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-12-10T07:07:27Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock)
Dec 10 07:07:30 crc kubenswrapper[4765]: 2025-12-10T07:07:28Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock)
Dec 10 07:07:30 crc kubenswrapper[4765]: /etc/init.d/functions: line 589: 46 Alarm clock "$@"
Dec 10 07:07:30 crc kubenswrapper[4765]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-hxr5k" message=<
Dec 10 07:07:30 crc kubenswrapper[4765]: Exiting ovn-controller (1) [FAILED]
Dec 10 07:07:30 crc kubenswrapper[4765]: Killing ovn-controller (1) [ OK ]
Dec 10 07:07:30 crc kubenswrapper[4765]: 2025-12-10T07:07:27Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock)
Dec 10 07:07:30 crc kubenswrapper[4765]: 2025-12-10T07:07:28Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock)
Dec 10 07:07:30 crc kubenswrapper[4765]: /etc/init.d/functions: line 589: 46 Alarm clock "$@"
Dec 10 07:07:30 crc kubenswrapper[4765]: >
Dec 10 07:07:30 crc kubenswrapper[4765]: E1210 07:07:30.862508 4765 kuberuntime_container.go:691] "PreStop hook failed" err=<
Dec 10 07:07:30 crc kubenswrapper[4765]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-12-10T07:07:27Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock)
Dec 10 07:07:30 crc kubenswrapper[4765]: 2025-12-10T07:07:28Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock)
Dec 10 07:07:30 crc kubenswrapper[4765]: /etc/init.d/functions: line 589: 46 Alarm clock "$@"
Dec 10 07:07:30 crc kubenswrapper[4765]: > pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" containerID="cri-o://f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f"
Dec 10 07:07:30 crc kubenswrapper[4765]: I1210 07:07:30.862557 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" containerID="cri-o://f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" gracePeriod=26
Dec 10 07:07:30 crc kubenswrapper[4765]: E1210 07:07:30.868356 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"]
Dec 10 07:07:30 crc kubenswrapper[4765]: E1210 07:07:30.869311 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"]
Dec 10 07:07:30 crc kubenswrapper[4765]: E1210 07:07:30.870209 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"]
Dec 10 07:07:30 crc kubenswrapper[4765]: E1210 07:07:30.870279 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container
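The block above is the restart path end to end: the liveness probe fails, the PreStop hook (/usr/share/ovn/scripts/ovn-ctl stop_controller) is killed by its own alarm timer (signal 14, surfacing as exit status 137), and the kubelet then kills the container with gracePeriod=26. A Go sketch of that general stop sequence; the sleeping child stands in for the container's main process, and the flow is illustrative, not CRI-O's or the kubelet's actual implementation:

package main

import (
	"context"
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace runs a pre-stop command under a deadline, then signals the
// main process and escalates to SIGKILL once the grace period runs out.
func stopWithGrace(proc *exec.Cmd, hook []string, grace time.Duration) {
	ctx, cancel := context.WithTimeout(context.Background(), grace)
	defer cancel()
	// The hook may hang or fail, as ovn-ctl stop_controller did above;
	// its error is logged and the shutdown continues regardless.
	if err := exec.CommandContext(ctx, hook[0], hook[1:]...).Run(); err != nil {
		fmt.Println("PreStop hook failed:", err)
	}
	_ = proc.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- proc.Wait() }()
	select {
	case <-done: // exited within the grace period
	case <-time.After(grace):
		_ = proc.Process.Kill() // SIGKILL, the hard stop
	}
}

func main() {
	// A sleeping child stands in for the container process.
	proc := exec.Command("sleep", "300")
	if err := proc.Start(); err != nil {
		fmt.Println(err)
		return
	}
	// gracePeriod=26 is the value from the log; the hook path is the one
	// the PreStop hook above executed.
	stopWithGrace(proc, []string{"/usr/share/ovn/scripts/ovn-ctl", "stop_controller"}, 26*time.Second)
	fmt.Println("container process stopped")
}

The readiness-probe ExecSync errors that follow are a side effect of this ordering: the prober keeps exec'ing into a container whose process is already gone.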
process not found" probeType="Readiness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:07:31 crc kubenswrapper[4765]: E1210 07:07:31.899643 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:07:31 crc kubenswrapper[4765]: E1210 07:07:31.902471 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:07:31 crc kubenswrapper[4765]: E1210 07:07:31.902753 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:07:31 crc kubenswrapper[4765]: E1210 07:07:31.902791 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:07:34 crc kubenswrapper[4765]: I1210 07:07:34.049884 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:07:34 crc kubenswrapper[4765]: I1210 07:07:34.049942 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:07:34 crc kubenswrapper[4765]: I1210 07:07:34.534795 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:07:34 crc kubenswrapper[4765]: I1210 07:07:34.748765 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:07:36 crc kubenswrapper[4765]: E1210 07:07:36.898557 4765 log.go:32] "ExecSync cmd from runtime service failed" 
err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:07:36 crc kubenswrapper[4765]: E1210 07:07:36.900296 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:07:36 crc kubenswrapper[4765]: E1210 07:07:36.900695 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:07:36 crc kubenswrapper[4765]: E1210 07:07:36.900739 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:07:38 crc kubenswrapper[4765]: I1210 07:07:38.458794 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-hxr5k_8b2c99d6-f2e1-4c1c-8825-e8c62d00d133/ovn-controller/0.log" Dec 10 07:07:38 crc kubenswrapper[4765]: I1210 07:07:38.459125 4765 generic.go:334] "Generic (PLEG): container finished" podID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" exitCode=-1 Dec 10 07:07:38 crc kubenswrapper[4765]: I1210 07:07:38.459165 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k" event={"ID":"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133","Type":"ContainerDied","Data":"f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f"} Dec 10 07:07:39 crc kubenswrapper[4765]: I1210 07:07:39.535557 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:07:39 crc kubenswrapper[4765]: I1210 07:07:39.748029 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.080510 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.088819 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 10 07:07:41 crc 
kubenswrapper[4765]: I1210 07:07:41.156056 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.192666 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.505124 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-v6h5d" event={"ID":"209844a2-e0ac-447f-99f6-28cd864ca648","Type":"ContainerStarted","Data":"1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab"} Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.521103 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.522994 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.529320 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.529505 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-92xh7" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.529669 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.529750 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.543236 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.546674 4765 generic.go:334] "Generic (PLEG): container finished" podID="ddcadac8-8742-4e99-9838-d62fced40efa" containerID="3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742" exitCode=137 Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.546745 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" event={"ID":"ddcadac8-8742-4e99-9838-d62fced40efa","Type":"ContainerDied","Data":"3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742"} Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.549774 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" event={"ID":"e849d8f0-dbfc-4708-8c74-be5f6a594304","Type":"ContainerStarted","Data":"8f77e877be4ae67a1e616cfbc9d4ea6a3bf230029111adcee5a6ed06371acc89"} Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.632129 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.632586 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-config\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.632665 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.632740 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-scripts\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.632792 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.632927 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4b62b966-7b0a-4099-977c-44682f703187-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.633034 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkwgp\" (UniqueName: \"kubernetes.io/projected/4b62b966-7b0a-4099-977c-44682f703187-kube-api-access-bkwgp\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.734628 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4b62b966-7b0a-4099-977c-44682f703187-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.734701 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkwgp\" (UniqueName: \"kubernetes.io/projected/4b62b966-7b0a-4099-977c-44682f703187-kube-api-access-bkwgp\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.734743 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.734775 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-config\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.734806 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc 
kubenswrapper[4765]: I1210 07:07:41.734850 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-scripts\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.734878 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.735716 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4b62b966-7b0a-4099-977c-44682f703187-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.736240 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-scripts\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.737019 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-config\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.746475 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.746907 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.753206 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.758045 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkwgp\" (UniqueName: \"kubernetes.io/projected/4b62b966-7b0a-4099-977c-44682f703187-kube-api-access-bkwgp\") pod \"ovn-northd-0\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") " pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.765227 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-6jr2m" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.774196 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:07:41 crc kubenswrapper[4765]: I1210 07:07:41.819424 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 10 07:07:41 crc kubenswrapper[4765]: E1210 07:07:41.900251 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:07:41 crc kubenswrapper[4765]: E1210 07:07:41.901111 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:07:41 crc kubenswrapper[4765]: E1210 07:07:41.901352 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:07:41 crc kubenswrapper[4765]: E1210 07:07:41.901632 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.482296 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-9mxvj"] Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.562501 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.582559 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"949ef1d3-9f74-4052-a482-9fea4e48d374","Type":"ContainerStarted","Data":"df8d58bedf5e00ee650bbe91fcb03935b51f76f914be11d02374897423dbf609"} Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.587789 4765 generic.go:334] "Generic (PLEG): container finished" podID="e849d8f0-dbfc-4708-8c74-be5f6a594304" containerID="8f77e877be4ae67a1e616cfbc9d4ea6a3bf230029111adcee5a6ed06371acc89" exitCode=0 Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.587859 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" event={"ID":"e849d8f0-dbfc-4708-8c74-be5f6a594304","Type":"ContainerDied","Data":"8f77e877be4ae67a1e616cfbc9d4ea6a3bf230029111adcee5a6ed06371acc89"} Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.587903 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" 
event={"ID":"e849d8f0-dbfc-4708-8c74-be5f6a594304","Type":"ContainerStarted","Data":"872d3c1338bf9552ba1b0ae26f5b306fd06424196f95fee421e4bc0c8dcbdf54"} Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.588452 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.598526 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7d035962-836c-48cf-8ea4-a3e5a23f58f9","Type":"ContainerStarted","Data":"f54cf327f09ae7940909c6415da47f057333ee5a45036d48b02fd4c6fd91cb2d"} Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.598766 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.598977 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-v6h5d" event={"ID":"209844a2-e0ac-447f-99f6-28cd864ca648","Type":"ContainerStarted","Data":"d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079"} Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.599188 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.600927 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9mxvj" event={"ID":"b145a008-b02d-4c5b-b426-5da68b2d2d09","Type":"ContainerStarted","Data":"522ddc241cc8be1ef3c2a8741d750675aa3cc0ca4b7bfd4226a6d16f9ada366d"} Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.629459 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-hxr5k_8b2c99d6-f2e1-4c1c-8825-e8c62d00d133/ovn-controller/0.log" Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.629617 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k" event={"ID":"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133","Type":"ContainerStarted","Data":"eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694"} Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.629975 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-hxr5k" Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.632956 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" podStartSLOduration=60.632940018 podStartE2EDuration="1m0.632940018s" podCreationTimestamp="2025-12-10 07:06:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:07:42.630481279 +0000 UTC m=+1182.357146615" watchObservedRunningTime="2025-12-10 07:07:42.632940018 +0000 UTC m=+1182.359605334" Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.634640 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" event={"ID":"ddcadac8-8742-4e99-9838-d62fced40efa","Type":"ContainerStarted","Data":"3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81"} Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.635213 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.635523 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" 
podStartSLOduration=-9223371920.219263 podStartE2EDuration="1m56.635513451s" podCreationTimestamp="2025-12-10 07:05:46 +0000 UTC" firstStartedPulling="2025-12-10 07:05:48.875026312 +0000 UTC m=+1068.601691628" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:07:42.608316379 +0000 UTC m=+1182.334981695" watchObservedRunningTime="2025-12-10 07:07:42.635513451 +0000 UTC m=+1182.362178767" Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.703003 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=66.79107902 podStartE2EDuration="1m58.702980596s" podCreationTimestamp="2025-12-10 07:05:44 +0000 UTC" firstStartedPulling="2025-12-10 07:05:46.465626875 +0000 UTC m=+1066.192292191" lastFinishedPulling="2025-12-10 07:06:38.377528451 +0000 UTC m=+1118.104193767" observedRunningTime="2025-12-10 07:07:42.689310728 +0000 UTC m=+1182.415976064" watchObservedRunningTime="2025-12-10 07:07:42.702980596 +0000 UTC m=+1182.429645912" Dec 10 07:07:42 crc kubenswrapper[4765]: I1210 07:07:42.706045 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-v6h5d" podStartSLOduration=100.952970579 podStartE2EDuration="1m46.706033993s" podCreationTimestamp="2025-12-10 07:05:56 +0000 UTC" firstStartedPulling="2025-12-10 07:06:28.557108141 +0000 UTC m=+1108.283773457" lastFinishedPulling="2025-12-10 07:06:34.310171555 +0000 UTC m=+1114.036836871" observedRunningTime="2025-12-10 07:07:42.66611672 +0000 UTC m=+1182.392782046" watchObservedRunningTime="2025-12-10 07:07:42.706033993 +0000 UTC m=+1182.432699309" Dec 10 07:07:43 crc kubenswrapper[4765]: I1210 07:07:43.648298 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4b62b966-7b0a-4099-977c-44682f703187","Type":"ContainerStarted","Data":"2f22b42012c8f998a234b3a4cfeb7bf6c49da45ba398308a6f3d43549b1d9fd6"} Dec 10 07:07:43 crc kubenswrapper[4765]: I1210 07:07:43.648847 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:07:44 crc kubenswrapper[4765]: I1210 07:07:44.534336 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.178595 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:07:47 crc kubenswrapper[4765]: E1210 07:07:47.179464 4765 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 10 07:07:47 crc kubenswrapper[4765]: E1210 07:07:47.179484 4765 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 10 07:07:47 crc kubenswrapper[4765]: E1210 07:07:47.179529 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift podName:d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86 nodeName:}" failed. 
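The galera record above reports podStartSLOduration=-9223371920.219263 next to lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC": a duration computed from an unset (zero) timestamp underflows toward the int64 minimum and prints as nonsense. A guarded version of that arithmetic, under the assumption that the intended quantity is startup time minus the image-pull window (simplified, not the latency tracker's actual code):

package main

import (
	"fmt"
	"time"
)

// startupSLO is observed-running-time minus creation, excluding the image
// pull window only when both pull timestamps are real; with a zero
// timestamp the subtraction is skipped instead of underflowing.
func startupSLO(created, pullStart, pullEnd, running time.Time) time.Duration {
	d := running.Sub(created)
	if !pullStart.IsZero() && !pullEnd.IsZero() {
		d -= pullEnd.Sub(pullStart)
	}
	return d
}

func main() {
	// Timestamps rounded from the openstack-galera-0 record above.
	created := time.Date(2025, 12, 10, 7, 5, 46, 0, time.UTC)
	pullStart := time.Date(2025, 12, 10, 7, 5, 48, 0, time.UTC)
	running := time.Date(2025, 12, 10, 7, 7, 42, 0, time.UTC)
	// lastFinishedPulling is the zero time in that record, so the pull
	// window is ignored rather than subtracted.
	fmt.Println(startupSLO(created, pullStart, time.Time{}, running))
}

With the guard, the result matches the sane podStartE2EDuration of about 1m56s that the same record carries.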
No retries permitted until 2025-12-10 07:08:51.179513275 +0000 UTC m=+1250.906178591 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift") pod "swift-storage-0" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86") : configmap "swift-ring-files" not found Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.546414 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.615966 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-hqhxd"] Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.616384 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" containerID="cri-o://3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81" gracePeriod=10 Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.618273 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.717334 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9mxvj" event={"ID":"b145a008-b02d-4c5b-b426-5da68b2d2d09","Type":"ContainerStarted","Data":"fa5034942a101db0b3408988ffd5a22073166bac86fe65bff4aae4090676f49b"} Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.720339 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4b62b966-7b0a-4099-977c-44682f703187","Type":"ContainerStarted","Data":"85b3d34a3b05018017b81c0696d2b13e8bc79227f19e89ee345bdd1bfbe284c6"} Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.720377 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4b62b966-7b0a-4099-977c-44682f703187","Type":"ContainerStarted","Data":"07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c"} Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.720909 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.746496 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-9mxvj" podStartSLOduration=60.663007916 podStartE2EDuration="1m4.746476798s" podCreationTimestamp="2025-12-10 07:06:43 +0000 UTC" firstStartedPulling="2025-12-10 07:07:42.510526494 +0000 UTC m=+1182.237191810" lastFinishedPulling="2025-12-10 07:07:46.593995376 +0000 UTC m=+1186.320660692" observedRunningTime="2025-12-10 07:07:47.74338374 +0000 UTC m=+1187.470049066" watchObservedRunningTime="2025-12-10 07:07:47.746476798 +0000 UTC m=+1187.473142114" Dec 10 07:07:47 crc kubenswrapper[4765]: I1210 07:07:47.768675 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.836677934 podStartE2EDuration="6.768658437s" podCreationTimestamp="2025-12-10 07:07:41 +0000 UTC" firstStartedPulling="2025-12-10 07:07:42.576770334 +0000 UTC m=+1182.303435650" lastFinishedPulling="2025-12-10 07:07:46.508750837 +0000 UTC m=+1186.235416153" observedRunningTime="2025-12-10 07:07:47.76172781 +0000 UTC m=+1187.488393126" watchObservedRunningTime="2025-12-10 07:07:47.768658437 +0000 UTC 
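This is the second retry of the same etc-swift mount: the failure at 07:07:15 scheduled a retry in 32s (durationBeforeRetry 32s), and this one schedules 1m4s, i.e. the kubelet doubles the delay on each consecutive failure up to a cap. A sketch of that policy; only the doubling is visible in this log, so the 500ms base and 2m cap below are placeholders, not values read from the kubelet:

package main

import (
	"fmt"
	"time"
)

// nextDelay doubles the retry delay up to a cap, the pattern behind
// "durationBeforeRetry 32s" earlier and "1m4s" here.
func nextDelay(prev, limit time.Duration) time.Duration {
	if next := prev * 2; next < limit {
		return next
	}
	return limit
}

func main() {
	// Base and cap are assumptions for illustration.
	d := 500 * time.Millisecond
	for i := 0; i < 9; i++ {
		fmt.Println(d) // ..., 32s, 1m4s, then pinned at the cap
		d = nextDelay(d, 2*time.Minute)
	}
}

The mounts keep failing for the same root cause as at 07:06:59: the projected volume references configmap "swift-ring-files", which does not exist yet because the swift-ring-rebalance job that publishes it is still running.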
m=+1187.495323753" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:47.924339 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:47.937630 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.423416 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.608396 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.690918 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-nb\") pod \"ddcadac8-8742-4e99-9838-d62fced40efa\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.691080 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-config\") pod \"ddcadac8-8742-4e99-9838-d62fced40efa\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.691164 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-dns-svc\") pod \"ddcadac8-8742-4e99-9838-d62fced40efa\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.691340 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-sb\") pod \"ddcadac8-8742-4e99-9838-d62fced40efa\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.691401 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv6tw\" (UniqueName: \"kubernetes.io/projected/ddcadac8-8742-4e99-9838-d62fced40efa-kube-api-access-qv6tw\") pod \"ddcadac8-8742-4e99-9838-d62fced40efa\" (UID: \"ddcadac8-8742-4e99-9838-d62fced40efa\") " Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.717990 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddcadac8-8742-4e99-9838-d62fced40efa-kube-api-access-qv6tw" (OuterVolumeSpecName: "kube-api-access-qv6tw") pod "ddcadac8-8742-4e99-9838-d62fced40efa" (UID: "ddcadac8-8742-4e99-9838-d62fced40efa"). InnerVolumeSpecName "kube-api-access-qv6tw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.747353 4765 generic.go:334] "Generic (PLEG): container finished" podID="ddcadac8-8742-4e99-9838-d62fced40efa" containerID="3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81" exitCode=0 Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.748502 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.749081 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" event={"ID":"ddcadac8-8742-4e99-9838-d62fced40efa","Type":"ContainerDied","Data":"3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81"} Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.749207 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-hqhxd" event={"ID":"ddcadac8-8742-4e99-9838-d62fced40efa","Type":"ContainerDied","Data":"9212e62011c113e0190d89094c4450f805ddb1928809bc1c62fb3ebb9e10c9c0"} Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.749286 4765 scope.go:117] "RemoveContainer" containerID="3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.774174 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ddcadac8-8742-4e99-9838-d62fced40efa" (UID: "ddcadac8-8742-4e99-9838-d62fced40efa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.791902 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ddcadac8-8742-4e99-9838-d62fced40efa" (UID: "ddcadac8-8742-4e99-9838-d62fced40efa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.792185 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-config" (OuterVolumeSpecName: "config") pod "ddcadac8-8742-4e99-9838-d62fced40efa" (UID: "ddcadac8-8742-4e99-9838-d62fced40efa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.793168 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ddcadac8-8742-4e99-9838-d62fced40efa" (UID: "ddcadac8-8742-4e99-9838-d62fced40efa"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.801161 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.801187 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv6tw\" (UniqueName: \"kubernetes.io/projected/ddcadac8-8742-4e99-9838-d62fced40efa-kube-api-access-qv6tw\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.801198 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.801208 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.801217 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ddcadac8-8742-4e99-9838-d62fced40efa-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.841982 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.849044 4765 scope.go:117] "RemoveContainer" containerID="3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.871093 4765 scope.go:117] "RemoveContainer" containerID="f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.909575 4765 scope.go:117] "RemoveContainer" containerID="3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81" Dec 10 07:07:48 crc kubenswrapper[4765]: E1210 07:07:48.914239 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81\": container with ID starting with 3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81 not found: ID does not exist" containerID="3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.914318 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81"} err="failed to get container status \"3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81\": rpc error: code = NotFound desc = could not find container \"3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81\": container with ID starting with 3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81 not found: ID does not exist" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.914347 4765 scope.go:117] "RemoveContainer" containerID="3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742" Dec 10 07:07:48 crc kubenswrapper[4765]: E1210 07:07:48.914809 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742\": container with ID starting with 3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742 not found: ID does not exist" containerID="3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.914871 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742"} err="failed to get container status \"3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742\": rpc error: code = NotFound desc = could not find container \"3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742\": container with ID starting with 3e98de9e273e3bd80065e30eab72f4b108e36a4fdc99b2aa86c93bd692ed9742 not found: ID does not exist" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.914910 4765 scope.go:117] "RemoveContainer" containerID="f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba" Dec 10 07:07:48 crc kubenswrapper[4765]: E1210 07:07:48.915289 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba\": container with ID starting with f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba not found: ID does not exist" containerID="f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba" Dec 10 07:07:48 crc kubenswrapper[4765]: I1210 07:07:48.915324 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba"} err="failed to get container status \"f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba\": rpc error: code = NotFound desc = could not find container \"f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba\": container with ID starting with f168bdf311d8eebf80a4ee3b8d92a262db08daf89eba9e01b61a63bf2d06e9ba not found: ID does not exist" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.077698 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-hqhxd"] Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.084821 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-hqhxd"] Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.303486 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-dc1e-account-create-update-tz4tl"] Dec 10 07:07:49 crc kubenswrapper[4765]: E1210 07:07:49.303902 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.303925 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" Dec 10 07:07:49 crc kubenswrapper[4765]: E1210 07:07:49.303937 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="init" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.303943 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="init" Dec 10 07:07:49 crc kubenswrapper[4765]: E1210 07:07:49.303966 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" 
containerName="dnsmasq-dns" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.303974 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.304184 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.304203 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" containerName="dnsmasq-dns" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.304870 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-dc1e-account-create-update-tz4tl" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.311202 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.322346 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-dc1e-account-create-update-tz4tl"] Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.357163 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-zx6hx"] Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.358238 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-zx6hx" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.372217 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-zx6hx"] Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.415533 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gqgn\" (UniqueName: \"kubernetes.io/projected/9d180e18-f719-4221-bdaf-a07eb74f2b6f-kube-api-access-9gqgn\") pod \"keystone-dc1e-account-create-update-tz4tl\" (UID: \"9d180e18-f719-4221-bdaf-a07eb74f2b6f\") " pod="openstack/keystone-dc1e-account-create-update-tz4tl" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.415705 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d180e18-f719-4221-bdaf-a07eb74f2b6f-operator-scripts\") pod \"keystone-dc1e-account-create-update-tz4tl\" (UID: \"9d180e18-f719-4221-bdaf-a07eb74f2b6f\") " pod="openstack/keystone-dc1e-account-create-update-tz4tl" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.517909 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d180e18-f719-4221-bdaf-a07eb74f2b6f-operator-scripts\") pod \"keystone-dc1e-account-create-update-tz4tl\" (UID: \"9d180e18-f719-4221-bdaf-a07eb74f2b6f\") " pod="openstack/keystone-dc1e-account-create-update-tz4tl" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.517969 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a76bfc27-647e-4e27-88d4-65c8fc8e1815-operator-scripts\") pod \"keystone-db-create-zx6hx\" (UID: \"a76bfc27-647e-4e27-88d4-65c8fc8e1815\") " pod="openstack/keystone-db-create-zx6hx" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.518059 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9g4kg\" 
(UniqueName: \"kubernetes.io/projected/a76bfc27-647e-4e27-88d4-65c8fc8e1815-kube-api-access-9g4kg\") pod \"keystone-db-create-zx6hx\" (UID: \"a76bfc27-647e-4e27-88d4-65c8fc8e1815\") " pod="openstack/keystone-db-create-zx6hx" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.518105 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gqgn\" (UniqueName: \"kubernetes.io/projected/9d180e18-f719-4221-bdaf-a07eb74f2b6f-kube-api-access-9gqgn\") pod \"keystone-dc1e-account-create-update-tz4tl\" (UID: \"9d180e18-f719-4221-bdaf-a07eb74f2b6f\") " pod="openstack/keystone-dc1e-account-create-update-tz4tl" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.518910 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d180e18-f719-4221-bdaf-a07eb74f2b6f-operator-scripts\") pod \"keystone-dc1e-account-create-update-tz4tl\" (UID: \"9d180e18-f719-4221-bdaf-a07eb74f2b6f\") " pod="openstack/keystone-dc1e-account-create-update-tz4tl" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.533740 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.535952 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gqgn\" (UniqueName: \"kubernetes.io/projected/9d180e18-f719-4221-bdaf-a07eb74f2b6f-kube-api-access-9gqgn\") pod \"keystone-dc1e-account-create-update-tz4tl\" (UID: \"9d180e18-f719-4221-bdaf-a07eb74f2b6f\") " pod="openstack/keystone-dc1e-account-create-update-tz4tl" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.620897 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a76bfc27-647e-4e27-88d4-65c8fc8e1815-operator-scripts\") pod \"keystone-db-create-zx6hx\" (UID: \"a76bfc27-647e-4e27-88d4-65c8fc8e1815\") " pod="openstack/keystone-db-create-zx6hx" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.621131 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9g4kg\" (UniqueName: \"kubernetes.io/projected/a76bfc27-647e-4e27-88d4-65c8fc8e1815-kube-api-access-9g4kg\") pod \"keystone-db-create-zx6hx\" (UID: \"a76bfc27-647e-4e27-88d4-65c8fc8e1815\") " pod="openstack/keystone-db-create-zx6hx" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.621894 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a76bfc27-647e-4e27-88d4-65c8fc8e1815-operator-scripts\") pod \"keystone-db-create-zx6hx\" (UID: \"a76bfc27-647e-4e27-88d4-65c8fc8e1815\") " pod="openstack/keystone-db-create-zx6hx" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.623626 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-dc1e-account-create-update-tz4tl" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.654013 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-jvjbd"] Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.655196 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-jvjbd" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.660581 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9g4kg\" (UniqueName: \"kubernetes.io/projected/a76bfc27-647e-4e27-88d4-65c8fc8e1815-kube-api-access-9g4kg\") pod \"keystone-db-create-zx6hx\" (UID: \"a76bfc27-647e-4e27-88d4-65c8fc8e1815\") " pod="openstack/keystone-db-create-zx6hx" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.667183 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-jvjbd"] Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.680285 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-zx6hx" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.763564 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-bd79-account-create-update-zdk9x"] Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.764700 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-bd79-account-create-update-zdk9x" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.767799 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.781203 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-bd79-account-create-update-zdk9x"] Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.839335 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3eeab68-7df8-402a-a581-23af26d7be84-operator-scripts\") pod \"placement-db-create-jvjbd\" (UID: \"e3eeab68-7df8-402a-a581-23af26d7be84\") " pod="openstack/placement-db-create-jvjbd" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.840289 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2x7j\" (UniqueName: \"kubernetes.io/projected/e3eeab68-7df8-402a-a581-23af26d7be84-kube-api-access-g2x7j\") pod \"placement-db-create-jvjbd\" (UID: \"e3eeab68-7df8-402a-a581-23af26d7be84\") " pod="openstack/placement-db-create-jvjbd" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.941960 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b8d221c-31b8-4dd5-b89c-2545b88461d5-operator-scripts\") pod \"placement-bd79-account-create-update-zdk9x\" (UID: \"3b8d221c-31b8-4dd5-b89c-2545b88461d5\") " pod="openstack/placement-bd79-account-create-update-zdk9x" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.942357 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2x7j\" (UniqueName: \"kubernetes.io/projected/e3eeab68-7df8-402a-a581-23af26d7be84-kube-api-access-g2x7j\") pod \"placement-db-create-jvjbd\" (UID: \"e3eeab68-7df8-402a-a581-23af26d7be84\") " pod="openstack/placement-db-create-jvjbd" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.942453 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n6vh\" (UniqueName: \"kubernetes.io/projected/3b8d221c-31b8-4dd5-b89c-2545b88461d5-kube-api-access-5n6vh\") pod \"placement-bd79-account-create-update-zdk9x\" (UID: 
\"3b8d221c-31b8-4dd5-b89c-2545b88461d5\") " pod="openstack/placement-bd79-account-create-update-zdk9x" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.942712 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3eeab68-7df8-402a-a581-23af26d7be84-operator-scripts\") pod \"placement-db-create-jvjbd\" (UID: \"e3eeab68-7df8-402a-a581-23af26d7be84\") " pod="openstack/placement-db-create-jvjbd" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.945566 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3eeab68-7df8-402a-a581-23af26d7be84-operator-scripts\") pod \"placement-db-create-jvjbd\" (UID: \"e3eeab68-7df8-402a-a581-23af26d7be84\") " pod="openstack/placement-db-create-jvjbd" Dec 10 07:07:49 crc kubenswrapper[4765]: I1210 07:07:49.963274 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2x7j\" (UniqueName: \"kubernetes.io/projected/e3eeab68-7df8-402a-a581-23af26d7be84-kube-api-access-g2x7j\") pod \"placement-db-create-jvjbd\" (UID: \"e3eeab68-7df8-402a-a581-23af26d7be84\") " pod="openstack/placement-db-create-jvjbd" Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.044523 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n6vh\" (UniqueName: \"kubernetes.io/projected/3b8d221c-31b8-4dd5-b89c-2545b88461d5-kube-api-access-5n6vh\") pod \"placement-bd79-account-create-update-zdk9x\" (UID: \"3b8d221c-31b8-4dd5-b89c-2545b88461d5\") " pod="openstack/placement-bd79-account-create-update-zdk9x" Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.044673 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b8d221c-31b8-4dd5-b89c-2545b88461d5-operator-scripts\") pod \"placement-bd79-account-create-update-zdk9x\" (UID: \"3b8d221c-31b8-4dd5-b89c-2545b88461d5\") " pod="openstack/placement-bd79-account-create-update-zdk9x" Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.053112 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b8d221c-31b8-4dd5-b89c-2545b88461d5-operator-scripts\") pod \"placement-bd79-account-create-update-zdk9x\" (UID: \"3b8d221c-31b8-4dd5-b89c-2545b88461d5\") " pod="openstack/placement-bd79-account-create-update-zdk9x" Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.063423 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n6vh\" (UniqueName: \"kubernetes.io/projected/3b8d221c-31b8-4dd5-b89c-2545b88461d5-kube-api-access-5n6vh\") pod \"placement-bd79-account-create-update-zdk9x\" (UID: \"3b8d221c-31b8-4dd5-b89c-2545b88461d5\") " pod="openstack/placement-bd79-account-create-update-zdk9x" Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.064500 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-jvjbd" Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.092731 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-bd79-account-create-update-zdk9x" Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.223566 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-zx6hx"] Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.337041 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-dc1e-account-create-update-tz4tl"] Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.609364 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddcadac8-8742-4e99-9838-d62fced40efa" path="/var/lib/kubelet/pods/ddcadac8-8742-4e99-9838-d62fced40efa/volumes" Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.610041 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-jvjbd"] Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.697175 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-bd79-account-create-update-zdk9x"] Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.770539 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-dc1e-account-create-update-tz4tl" event={"ID":"9d180e18-f719-4221-bdaf-a07eb74f2b6f","Type":"ContainerStarted","Data":"0b9385d3a2b472129ce31b997095c3d9e59316e9f7c4dab74278e1f6448b2654"} Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.770582 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-dc1e-account-create-update-tz4tl" event={"ID":"9d180e18-f719-4221-bdaf-a07eb74f2b6f","Type":"ContainerStarted","Data":"ec1b40817070c3bbd2c0ccf38717155687d8c6051e5bf6789111a30a991fe372"} Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.774448 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-zx6hx" event={"ID":"a76bfc27-647e-4e27-88d4-65c8fc8e1815","Type":"ContainerStarted","Data":"0720be3be9097ccf18ece7dc254d8edf91ea794a7f29bb4e275248ca1c0e9261"} Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.774516 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-zx6hx" event={"ID":"a76bfc27-647e-4e27-88d4-65c8fc8e1815","Type":"ContainerStarted","Data":"81228af5b5cf916513e56818d9ff4780d81a2f69e9329b238c38502e424f6d86"} Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.776809 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-bd79-account-create-update-zdk9x" event={"ID":"3b8d221c-31b8-4dd5-b89c-2545b88461d5","Type":"ContainerStarted","Data":"e4d3eac2c216d2607c86e64b5da4c481e93d88cc647bcbd844bd7e5a3644ece2"} Dec 10 07:07:50 crc kubenswrapper[4765]: I1210 07:07:50.777906 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-jvjbd" event={"ID":"e3eeab68-7df8-402a-a581-23af26d7be84","Type":"ContainerStarted","Data":"f5fa0b017346757a9b18b35e726ba29303b820a0b79c0cb43aa7bf63253257c9"} Dec 10 07:07:51 crc kubenswrapper[4765]: I1210 07:07:51.786066 4765 generic.go:334] "Generic (PLEG): container finished" podID="a76bfc27-647e-4e27-88d4-65c8fc8e1815" containerID="0720be3be9097ccf18ece7dc254d8edf91ea794a7f29bb4e275248ca1c0e9261" exitCode=0 Dec 10 07:07:51 crc kubenswrapper[4765]: I1210 07:07:51.786116 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-zx6hx" event={"ID":"a76bfc27-647e-4e27-88d4-65c8fc8e1815","Type":"ContainerDied","Data":"0720be3be9097ccf18ece7dc254d8edf91ea794a7f29bb4e275248ca1c0e9261"} Dec 10 07:07:51 crc kubenswrapper[4765]: I1210 
07:07:51.787880 4765 generic.go:334] "Generic (PLEG): container finished" podID="3b8d221c-31b8-4dd5-b89c-2545b88461d5" containerID="0646d1ce4bef47ba9bf9033b093dfa3beee93176dafb3db9996b42d2069ba6ef" exitCode=0 Dec 10 07:07:51 crc kubenswrapper[4765]: I1210 07:07:51.788001 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-bd79-account-create-update-zdk9x" event={"ID":"3b8d221c-31b8-4dd5-b89c-2545b88461d5","Type":"ContainerDied","Data":"0646d1ce4bef47ba9bf9033b093dfa3beee93176dafb3db9996b42d2069ba6ef"} Dec 10 07:07:51 crc kubenswrapper[4765]: I1210 07:07:51.789623 4765 generic.go:334] "Generic (PLEG): container finished" podID="e3eeab68-7df8-402a-a581-23af26d7be84" containerID="77ab2e0016b06d132a4b5d1f9e9e65250792bd9f6a7224bc5a0f2c0925f0b66e" exitCode=0 Dec 10 07:07:51 crc kubenswrapper[4765]: I1210 07:07:51.789683 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-jvjbd" event={"ID":"e3eeab68-7df8-402a-a581-23af26d7be84","Type":"ContainerDied","Data":"77ab2e0016b06d132a4b5d1f9e9e65250792bd9f6a7224bc5a0f2c0925f0b66e"} Dec 10 07:07:51 crc kubenswrapper[4765]: I1210 07:07:51.791242 4765 generic.go:334] "Generic (PLEG): container finished" podID="9d180e18-f719-4221-bdaf-a07eb74f2b6f" containerID="0b9385d3a2b472129ce31b997095c3d9e59316e9f7c4dab74278e1f6448b2654" exitCode=0 Dec 10 07:07:51 crc kubenswrapper[4765]: I1210 07:07:51.791299 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-dc1e-account-create-update-tz4tl" event={"ID":"9d180e18-f719-4221-bdaf-a07eb74f2b6f","Type":"ContainerDied","Data":"0b9385d3a2b472129ce31b997095c3d9e59316e9f7c4dab74278e1f6448b2654"} Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.168924 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-zx6hx" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.273924 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-jvjbd" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.280874 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-bd79-account-create-update-zdk9x" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.296516 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-dc1e-account-create-update-tz4tl" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.304401 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a76bfc27-647e-4e27-88d4-65c8fc8e1815-operator-scripts\") pod \"a76bfc27-647e-4e27-88d4-65c8fc8e1815\" (UID: \"a76bfc27-647e-4e27-88d4-65c8fc8e1815\") " Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.310792 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9g4kg\" (UniqueName: \"kubernetes.io/projected/a76bfc27-647e-4e27-88d4-65c8fc8e1815-kube-api-access-9g4kg\") pod \"a76bfc27-647e-4e27-88d4-65c8fc8e1815\" (UID: \"a76bfc27-647e-4e27-88d4-65c8fc8e1815\") " Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.310484 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a76bfc27-647e-4e27-88d4-65c8fc8e1815-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a76bfc27-647e-4e27-88d4-65c8fc8e1815" (UID: "a76bfc27-647e-4e27-88d4-65c8fc8e1815"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.321219 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a76bfc27-647e-4e27-88d4-65c8fc8e1815-kube-api-access-9g4kg" (OuterVolumeSpecName: "kube-api-access-9g4kg") pod "a76bfc27-647e-4e27-88d4-65c8fc8e1815" (UID: "a76bfc27-647e-4e27-88d4-65c8fc8e1815"). InnerVolumeSpecName "kube-api-access-9g4kg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.412737 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2x7j\" (UniqueName: \"kubernetes.io/projected/e3eeab68-7df8-402a-a581-23af26d7be84-kube-api-access-g2x7j\") pod \"e3eeab68-7df8-402a-a581-23af26d7be84\" (UID: \"e3eeab68-7df8-402a-a581-23af26d7be84\") " Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.412798 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3eeab68-7df8-402a-a581-23af26d7be84-operator-scripts\") pod \"e3eeab68-7df8-402a-a581-23af26d7be84\" (UID: \"e3eeab68-7df8-402a-a581-23af26d7be84\") " Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.412855 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5n6vh\" (UniqueName: \"kubernetes.io/projected/3b8d221c-31b8-4dd5-b89c-2545b88461d5-kube-api-access-5n6vh\") pod \"3b8d221c-31b8-4dd5-b89c-2545b88461d5\" (UID: \"3b8d221c-31b8-4dd5-b89c-2545b88461d5\") " Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.412925 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d180e18-f719-4221-bdaf-a07eb74f2b6f-operator-scripts\") pod \"9d180e18-f719-4221-bdaf-a07eb74f2b6f\" (UID: \"9d180e18-f719-4221-bdaf-a07eb74f2b6f\") " Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.412988 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gqgn\" (UniqueName: \"kubernetes.io/projected/9d180e18-f719-4221-bdaf-a07eb74f2b6f-kube-api-access-9gqgn\") pod \"9d180e18-f719-4221-bdaf-a07eb74f2b6f\" (UID: \"9d180e18-f719-4221-bdaf-a07eb74f2b6f\") " Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.413003 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b8d221c-31b8-4dd5-b89c-2545b88461d5-operator-scripts\") pod \"3b8d221c-31b8-4dd5-b89c-2545b88461d5\" (UID: \"3b8d221c-31b8-4dd5-b89c-2545b88461d5\") " Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.413341 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3eeab68-7df8-402a-a581-23af26d7be84-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e3eeab68-7df8-402a-a581-23af26d7be84" (UID: "e3eeab68-7df8-402a-a581-23af26d7be84"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.413435 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d180e18-f719-4221-bdaf-a07eb74f2b6f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9d180e18-f719-4221-bdaf-a07eb74f2b6f" (UID: "9d180e18-f719-4221-bdaf-a07eb74f2b6f"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.413498 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3eeab68-7df8-402a-a581-23af26d7be84-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.413579 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9g4kg\" (UniqueName: \"kubernetes.io/projected/a76bfc27-647e-4e27-88d4-65c8fc8e1815-kube-api-access-9g4kg\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.413607 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a76bfc27-647e-4e27-88d4-65c8fc8e1815-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.413660 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b8d221c-31b8-4dd5-b89c-2545b88461d5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3b8d221c-31b8-4dd5-b89c-2545b88461d5" (UID: "3b8d221c-31b8-4dd5-b89c-2545b88461d5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.415920 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d180e18-f719-4221-bdaf-a07eb74f2b6f-kube-api-access-9gqgn" (OuterVolumeSpecName: "kube-api-access-9gqgn") pod "9d180e18-f719-4221-bdaf-a07eb74f2b6f" (UID: "9d180e18-f719-4221-bdaf-a07eb74f2b6f"). InnerVolumeSpecName "kube-api-access-9gqgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.416273 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3eeab68-7df8-402a-a581-23af26d7be84-kube-api-access-g2x7j" (OuterVolumeSpecName: "kube-api-access-g2x7j") pod "e3eeab68-7df8-402a-a581-23af26d7be84" (UID: "e3eeab68-7df8-402a-a581-23af26d7be84"). InnerVolumeSpecName "kube-api-access-g2x7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.416643 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b8d221c-31b8-4dd5-b89c-2545b88461d5-kube-api-access-5n6vh" (OuterVolumeSpecName: "kube-api-access-5n6vh") pod "3b8d221c-31b8-4dd5-b89c-2545b88461d5" (UID: "3b8d221c-31b8-4dd5-b89c-2545b88461d5"). InnerVolumeSpecName "kube-api-access-5n6vh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.514764 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2x7j\" (UniqueName: \"kubernetes.io/projected/e3eeab68-7df8-402a-a581-23af26d7be84-kube-api-access-g2x7j\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.514799 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5n6vh\" (UniqueName: \"kubernetes.io/projected/3b8d221c-31b8-4dd5-b89c-2545b88461d5-kube-api-access-5n6vh\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.514811 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d180e18-f719-4221-bdaf-a07eb74f2b6f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.514820 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gqgn\" (UniqueName: \"kubernetes.io/projected/9d180e18-f719-4221-bdaf-a07eb74f2b6f-kube-api-access-9gqgn\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.514831 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b8d221c-31b8-4dd5-b89c-2545b88461d5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.806816 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-dc1e-account-create-update-tz4tl" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.806810 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-dc1e-account-create-update-tz4tl" event={"ID":"9d180e18-f719-4221-bdaf-a07eb74f2b6f","Type":"ContainerDied","Data":"ec1b40817070c3bbd2c0ccf38717155687d8c6051e5bf6789111a30a991fe372"} Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.806940 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec1b40817070c3bbd2c0ccf38717155687d8c6051e5bf6789111a30a991fe372" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.808461 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-zx6hx" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.808701 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-zx6hx" event={"ID":"a76bfc27-647e-4e27-88d4-65c8fc8e1815","Type":"ContainerDied","Data":"81228af5b5cf916513e56818d9ff4780d81a2f69e9329b238c38502e424f6d86"} Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.808750 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81228af5b5cf916513e56818d9ff4780d81a2f69e9329b238c38502e424f6d86" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.809789 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-bd79-account-create-update-zdk9x" event={"ID":"3b8d221c-31b8-4dd5-b89c-2545b88461d5","Type":"ContainerDied","Data":"e4d3eac2c216d2607c86e64b5da4c481e93d88cc647bcbd844bd7e5a3644ece2"} Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.809816 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-bd79-account-create-update-zdk9x" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.809819 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4d3eac2c216d2607c86e64b5da4c481e93d88cc647bcbd844bd7e5a3644ece2" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.811397 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-jvjbd" event={"ID":"e3eeab68-7df8-402a-a581-23af26d7be84","Type":"ContainerDied","Data":"f5fa0b017346757a9b18b35e726ba29303b820a0b79c0cb43aa7bf63253257c9"} Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.811418 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5fa0b017346757a9b18b35e726ba29303b820a0b79c0cb43aa7bf63253257c9" Dec 10 07:07:53 crc kubenswrapper[4765]: I1210 07:07:53.811437 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-jvjbd" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.534454 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.971409 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-4bdwz"] Dec 10 07:07:54 crc kubenswrapper[4765]: E1210 07:07:54.972030 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3eeab68-7df8-402a-a581-23af26d7be84" containerName="mariadb-database-create" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.972042 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3eeab68-7df8-402a-a581-23af26d7be84" containerName="mariadb-database-create" Dec 10 07:07:54 crc kubenswrapper[4765]: E1210 07:07:54.972063 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a76bfc27-647e-4e27-88d4-65c8fc8e1815" containerName="mariadb-database-create" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.972069 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a76bfc27-647e-4e27-88d4-65c8fc8e1815" containerName="mariadb-database-create" Dec 10 07:07:54 crc kubenswrapper[4765]: E1210 07:07:54.972079 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d180e18-f719-4221-bdaf-a07eb74f2b6f" containerName="mariadb-account-create-update" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.972100 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d180e18-f719-4221-bdaf-a07eb74f2b6f" containerName="mariadb-account-create-update" Dec 10 07:07:54 crc kubenswrapper[4765]: E1210 07:07:54.972122 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b8d221c-31b8-4dd5-b89c-2545b88461d5" containerName="mariadb-account-create-update" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.972128 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b8d221c-31b8-4dd5-b89c-2545b88461d5" containerName="mariadb-account-create-update" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.972319 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a76bfc27-647e-4e27-88d4-65c8fc8e1815" containerName="mariadb-database-create" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.972342 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b8d221c-31b8-4dd5-b89c-2545b88461d5" 
containerName="mariadb-account-create-update" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.972358 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3eeab68-7df8-402a-a581-23af26d7be84" containerName="mariadb-database-create" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.972372 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d180e18-f719-4221-bdaf-a07eb74f2b6f" containerName="mariadb-account-create-update" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.972912 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-4bdwz" Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.984557 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-4bdwz"] Dec 10 07:07:54 crc kubenswrapper[4765]: I1210 07:07:54.991792 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-b1c8-account-create-update-7h2mp"] Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.006295 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-b1c8-account-create-update-7h2mp" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.008525 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.016030 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-b1c8-account-create-update-7h2mp"] Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.150783 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zgkx\" (UniqueName: \"kubernetes.io/projected/facc4775-9655-433e-90d8-9dbea8e3ac63-kube-api-access-4zgkx\") pod \"glance-db-create-4bdwz\" (UID: \"facc4775-9655-433e-90d8-9dbea8e3ac63\") " pod="openstack/glance-db-create-4bdwz" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.150948 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-operator-scripts\") pod \"glance-b1c8-account-create-update-7h2mp\" (UID: \"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5\") " pod="openstack/glance-b1c8-account-create-update-7h2mp" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.150985 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/facc4775-9655-433e-90d8-9dbea8e3ac63-operator-scripts\") pod \"glance-db-create-4bdwz\" (UID: \"facc4775-9655-433e-90d8-9dbea8e3ac63\") " pod="openstack/glance-db-create-4bdwz" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.151025 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxcqt\" (UniqueName: \"kubernetes.io/projected/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-kube-api-access-xxcqt\") pod \"glance-b1c8-account-create-update-7h2mp\" (UID: \"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5\") " pod="openstack/glance-b1c8-account-create-update-7h2mp" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.252899 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-operator-scripts\") pod \"glance-b1c8-account-create-update-7h2mp\" (UID: 
\"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5\") " pod="openstack/glance-b1c8-account-create-update-7h2mp" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.252978 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/facc4775-9655-433e-90d8-9dbea8e3ac63-operator-scripts\") pod \"glance-db-create-4bdwz\" (UID: \"facc4775-9655-433e-90d8-9dbea8e3ac63\") " pod="openstack/glance-db-create-4bdwz" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.253022 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxcqt\" (UniqueName: \"kubernetes.io/projected/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-kube-api-access-xxcqt\") pod \"glance-b1c8-account-create-update-7h2mp\" (UID: \"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5\") " pod="openstack/glance-b1c8-account-create-update-7h2mp" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.253107 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zgkx\" (UniqueName: \"kubernetes.io/projected/facc4775-9655-433e-90d8-9dbea8e3ac63-kube-api-access-4zgkx\") pod \"glance-db-create-4bdwz\" (UID: \"facc4775-9655-433e-90d8-9dbea8e3ac63\") " pod="openstack/glance-db-create-4bdwz" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.253821 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-operator-scripts\") pod \"glance-b1c8-account-create-update-7h2mp\" (UID: \"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5\") " pod="openstack/glance-b1c8-account-create-update-7h2mp" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.254006 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/facc4775-9655-433e-90d8-9dbea8e3ac63-operator-scripts\") pod \"glance-db-create-4bdwz\" (UID: \"facc4775-9655-433e-90d8-9dbea8e3ac63\") " pod="openstack/glance-db-create-4bdwz" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.275122 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zgkx\" (UniqueName: \"kubernetes.io/projected/facc4775-9655-433e-90d8-9dbea8e3ac63-kube-api-access-4zgkx\") pod \"glance-db-create-4bdwz\" (UID: \"facc4775-9655-433e-90d8-9dbea8e3ac63\") " pod="openstack/glance-db-create-4bdwz" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.275455 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxcqt\" (UniqueName: \"kubernetes.io/projected/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-kube-api-access-xxcqt\") pod \"glance-b1c8-account-create-update-7h2mp\" (UID: \"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5\") " pod="openstack/glance-b1c8-account-create-update-7h2mp" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.289779 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-4bdwz" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.323187 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-b1c8-account-create-update-7h2mp" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.734242 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-4bdwz"] Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.835264 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.839817 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-b1c8-account-create-update-7h2mp"] Dec 10 07:07:55 crc kubenswrapper[4765]: I1210 07:07:55.845673 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-4bdwz" event={"ID":"facc4775-9655-433e-90d8-9dbea8e3ac63","Type":"ContainerStarted","Data":"ea87f16d4e46d862edf7a35aa0b3c2578441f2f4454045dade13f71f313eb784"} Dec 10 07:07:56 crc kubenswrapper[4765]: I1210 07:07:56.854989 4765 generic.go:334] "Generic (PLEG): container finished" podID="d4fb93a5-4fe1-41ba-aed0-3f29d881fce5" containerID="8f8cddb2be18a038febcc143f2e0f10989133f18e4472abe5e52335b05baf50b" exitCode=0 Dec 10 07:07:56 crc kubenswrapper[4765]: I1210 07:07:56.855348 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-b1c8-account-create-update-7h2mp" event={"ID":"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5","Type":"ContainerDied","Data":"8f8cddb2be18a038febcc143f2e0f10989133f18e4472abe5e52335b05baf50b"} Dec 10 07:07:56 crc kubenswrapper[4765]: I1210 07:07:56.855380 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-b1c8-account-create-update-7h2mp" event={"ID":"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5","Type":"ContainerStarted","Data":"a560c63c92558ef4f9355020c0856e3f3e831b44ca8d95d85745f534ba0b462a"} Dec 10 07:07:56 crc kubenswrapper[4765]: I1210 07:07:56.857601 4765 generic.go:334] "Generic (PLEG): container finished" podID="b145a008-b02d-4c5b-b426-5da68b2d2d09" containerID="fa5034942a101db0b3408988ffd5a22073166bac86fe65bff4aae4090676f49b" exitCode=0 Dec 10 07:07:56 crc kubenswrapper[4765]: I1210 07:07:56.857657 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9mxvj" event={"ID":"b145a008-b02d-4c5b-b426-5da68b2d2d09","Type":"ContainerDied","Data":"fa5034942a101db0b3408988ffd5a22073166bac86fe65bff4aae4090676f49b"} Dec 10 07:07:56 crc kubenswrapper[4765]: I1210 07:07:56.859824 4765 generic.go:334] "Generic (PLEG): container finished" podID="facc4775-9655-433e-90d8-9dbea8e3ac63" containerID="820d4c5babe9e47d26c867a774b3593a3faf3b4ab2eb66e4d0a76c25b022d470" exitCode=0 Dec 10 07:07:56 crc kubenswrapper[4765]: I1210 07:07:56.859843 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-4bdwz" event={"ID":"facc4775-9655-433e-90d8-9dbea8e3ac63","Type":"ContainerDied","Data":"820d4c5babe9e47d26c867a774b3593a3faf3b4ab2eb66e4d0a76c25b022d470"} Dec 10 07:07:56 crc kubenswrapper[4765]: I1210 07:07:56.893920 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.326898 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.457526 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-b1c8-account-create-update-7h2mp" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.484981 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-4bdwz" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.514136 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-scripts\") pod \"b145a008-b02d-4c5b-b426-5da68b2d2d09\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.514251 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-swiftconf\") pod \"b145a008-b02d-4c5b-b426-5da68b2d2d09\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.514303 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-dispersionconf\") pod \"b145a008-b02d-4c5b-b426-5da68b2d2d09\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.514361 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-combined-ca-bundle\") pod \"b145a008-b02d-4c5b-b426-5da68b2d2d09\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.514394 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b145a008-b02d-4c5b-b426-5da68b2d2d09-etc-swift\") pod \"b145a008-b02d-4c5b-b426-5da68b2d2d09\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.514447 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmzb9\" (UniqueName: \"kubernetes.io/projected/b145a008-b02d-4c5b-b426-5da68b2d2d09-kube-api-access-zmzb9\") pod \"b145a008-b02d-4c5b-b426-5da68b2d2d09\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.514474 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-ring-data-devices\") pod \"b145a008-b02d-4c5b-b426-5da68b2d2d09\" (UID: \"b145a008-b02d-4c5b-b426-5da68b2d2d09\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.515561 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "b145a008-b02d-4c5b-b426-5da68b2d2d09" (UID: "b145a008-b02d-4c5b-b426-5da68b2d2d09"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.516225 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b145a008-b02d-4c5b-b426-5da68b2d2d09-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b145a008-b02d-4c5b-b426-5da68b2d2d09" (UID: "b145a008-b02d-4c5b-b426-5da68b2d2d09"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.520769 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b145a008-b02d-4c5b-b426-5da68b2d2d09-kube-api-access-zmzb9" (OuterVolumeSpecName: "kube-api-access-zmzb9") pod "b145a008-b02d-4c5b-b426-5da68b2d2d09" (UID: "b145a008-b02d-4c5b-b426-5da68b2d2d09"). InnerVolumeSpecName "kube-api-access-zmzb9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.525619 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "b145a008-b02d-4c5b-b426-5da68b2d2d09" (UID: "b145a008-b02d-4c5b-b426-5da68b2d2d09"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.540905 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b145a008-b02d-4c5b-b426-5da68b2d2d09" (UID: "b145a008-b02d-4c5b-b426-5da68b2d2d09"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.551442 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-scripts" (OuterVolumeSpecName: "scripts") pod "b145a008-b02d-4c5b-b426-5da68b2d2d09" (UID: "b145a008-b02d-4c5b-b426-5da68b2d2d09"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.559062 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "b145a008-b02d-4c5b-b426-5da68b2d2d09" (UID: "b145a008-b02d-4c5b-b426-5da68b2d2d09"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.615703 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-operator-scripts\") pod \"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5\" (UID: \"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.615832 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zgkx\" (UniqueName: \"kubernetes.io/projected/facc4775-9655-433e-90d8-9dbea8e3ac63-kube-api-access-4zgkx\") pod \"facc4775-9655-433e-90d8-9dbea8e3ac63\" (UID: \"facc4775-9655-433e-90d8-9dbea8e3ac63\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.615920 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxcqt\" (UniqueName: \"kubernetes.io/projected/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-kube-api-access-xxcqt\") pod \"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5\" (UID: \"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.615975 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/facc4775-9655-433e-90d8-9dbea8e3ac63-operator-scripts\") pod \"facc4775-9655-433e-90d8-9dbea8e3ac63\" (UID: \"facc4775-9655-433e-90d8-9dbea8e3ac63\") " Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.616340 4765 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.616358 4765 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.616368 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b145a008-b02d-4c5b-b426-5da68b2d2d09-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.616376 4765 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b145a008-b02d-4c5b-b426-5da68b2d2d09-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.616385 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmzb9\" (UniqueName: \"kubernetes.io/projected/b145a008-b02d-4c5b-b426-5da68b2d2d09-kube-api-access-zmzb9\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.616393 4765 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.616401 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b145a008-b02d-4c5b-b426-5da68b2d2d09-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.616511 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/facc4775-9655-433e-90d8-9dbea8e3ac63-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "facc4775-9655-433e-90d8-9dbea8e3ac63" (UID: "facc4775-9655-433e-90d8-9dbea8e3ac63"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.616700 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d4fb93a5-4fe1-41ba-aed0-3f29d881fce5" (UID: "d4fb93a5-4fe1-41ba-aed0-3f29d881fce5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.618449 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-kube-api-access-xxcqt" (OuterVolumeSpecName: "kube-api-access-xxcqt") pod "d4fb93a5-4fe1-41ba-aed0-3f29d881fce5" (UID: "d4fb93a5-4fe1-41ba-aed0-3f29d881fce5"). InnerVolumeSpecName "kube-api-access-xxcqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.619223 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/facc4775-9655-433e-90d8-9dbea8e3ac63-kube-api-access-4zgkx" (OuterVolumeSpecName: "kube-api-access-4zgkx") pod "facc4775-9655-433e-90d8-9dbea8e3ac63" (UID: "facc4775-9655-433e-90d8-9dbea8e3ac63"). InnerVolumeSpecName "kube-api-access-4zgkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.718461 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxcqt\" (UniqueName: \"kubernetes.io/projected/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-kube-api-access-xxcqt\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.719213 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/facc4775-9655-433e-90d8-9dbea8e3ac63-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.719231 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.719240 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zgkx\" (UniqueName: \"kubernetes.io/projected/facc4775-9655-433e-90d8-9dbea8e3ac63-kube-api-access-4zgkx\") on node \"crc\" DevicePath \"\"" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.880690 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-4bdwz" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.880686 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-4bdwz" event={"ID":"facc4775-9655-433e-90d8-9dbea8e3ac63","Type":"ContainerDied","Data":"ea87f16d4e46d862edf7a35aa0b3c2578441f2f4454045dade13f71f313eb784"} Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.880863 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea87f16d4e46d862edf7a35aa0b3c2578441f2f4454045dade13f71f313eb784" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.883230 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-b1c8-account-create-update-7h2mp" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.883516 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-b1c8-account-create-update-7h2mp" event={"ID":"d4fb93a5-4fe1-41ba-aed0-3f29d881fce5","Type":"ContainerDied","Data":"a560c63c92558ef4f9355020c0856e3f3e831b44ca8d95d85745f534ba0b462a"} Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.883621 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a560c63c92558ef4f9355020c0856e3f3e831b44ca8d95d85745f534ba0b462a" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.885220 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9mxvj" event={"ID":"b145a008-b02d-4c5b-b426-5da68b2d2d09","Type":"ContainerDied","Data":"522ddc241cc8be1ef3c2a8741d750675aa3cc0ca4b7bfd4226a6d16f9ada366d"} Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.885261 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="522ddc241cc8be1ef3c2a8741d750675aa3cc0ca4b7bfd4226a6d16f9ada366d" Dec 10 07:07:58 crc kubenswrapper[4765]: I1210 07:07:58.885353 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-9mxvj" Dec 10 07:07:59 crc kubenswrapper[4765]: W1210 07:07:59.227326 4765 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb145a008_b02d_4c5b_b426_5da68b2d2d09.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb145a008_b02d_4c5b_b426_5da68b2d2d09.slice: no such file or directory Dec 10 07:07:59 crc kubenswrapper[4765]: W1210 07:07:59.227605 4765 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddcadac8_8742_4e99_9838_d62fced40efa.slice/crio-3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddcadac8_8742_4e99_9838_d62fced40efa.slice/crio-3ed72b4473c94f6c626b8aa7c73322116872e5bda9494c54d59f1fd9405cce81.scope: no such file or directory Dec 10 07:07:59 crc kubenswrapper[4765]: W1210 07:07:59.229143 4765 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d180e18_f719_4221_bdaf_a07eb74f2b6f.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d180e18_f719_4221_bdaf_a07eb74f2b6f.slice: no such file or directory Dec 10 07:07:59 crc kubenswrapper[4765]: W1210 07:07:59.229169 4765 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda76bfc27_647e_4e27_88d4_65c8fc8e1815.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda76bfc27_647e_4e27_88d4_65c8fc8e1815.slice: no such file or directory Dec 10 07:07:59 crc kubenswrapper[4765]: W1210 07:07:59.229187 4765 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3eeab68_7df8_402a_a581_23af26d7be84.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3eeab68_7df8_402a_a581_23af26d7be84.slice: no such file or directory Dec 10 07:07:59 crc kubenswrapper[4765]: W1210 07:07:59.229227 4765 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b8d221c_31b8_4dd5_b89c_2545b88461d5.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b8d221c_31b8_4dd5_b89c_2545b88461d5.slice: no such file or directory Dec 10 07:07:59 crc kubenswrapper[4765]: W1210 07:07:59.229278 4765 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfacc4775_9655_433e_90d8_9dbea8e3ac63.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfacc4775_9655_433e_90d8_9dbea8e3ac63.slice: no such file or directory Dec 10 07:07:59 crc kubenswrapper[4765]: W1210 07:07:59.229936 4765 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4fb93a5_4fe1_41ba_aed0_3f29d881fce5.slice": 0x40000100 == 
IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4fb93a5_4fe1_41ba_aed0_3f29d881fce5.slice: no such file or directory Dec 10 07:07:59 crc kubenswrapper[4765]: I1210 07:07:59.534440 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 10 07:07:59 crc kubenswrapper[4765]: I1210 07:07:59.902740 4765 generic.go:334] "Generic (PLEG): container finished" podID="4934a96a-3419-4623-9f64-58e8d9206864" containerID="b5e74a15b203ef52f851095f1cc16190a0f4872664a016876134693584df8e67" exitCode=137 Dec 10 07:07:59 crc kubenswrapper[4765]: I1210 07:07:59.902794 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" event={"ID":"4934a96a-3419-4623-9f64-58e8d9206864","Type":"ContainerDied","Data":"b5e74a15b203ef52f851095f1cc16190a0f4872664a016876134693584df8e67"} Dec 10 07:07:59 crc kubenswrapper[4765]: I1210 07:07:59.902825 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" event={"ID":"4934a96a-3419-4623-9f64-58e8d9206864","Type":"ContainerDied","Data":"847eb1c62f57a1066ea20ea3f189e6e4f27b7612fbda1afc8dd680b853ca6769"} Dec 10 07:07:59 crc kubenswrapper[4765]: I1210 07:07:59.902836 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="847eb1c62f57a1066ea20ea3f189e6e4f27b7612fbda1afc8dd680b853ca6769" Dec 10 07:07:59 crc kubenswrapper[4765]: I1210 07:07:59.948179 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.040606 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-config\") pod \"4934a96a-3419-4623-9f64-58e8d9206864\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.040651 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-ovsdbserver-nb\") pod \"4934a96a-3419-4623-9f64-58e8d9206864\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.040726 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-dns-svc\") pod \"4934a96a-3419-4623-9f64-58e8d9206864\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.040796 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsgf7\" (UniqueName: \"kubernetes.io/projected/4934a96a-3419-4623-9f64-58e8d9206864-kube-api-access-hsgf7\") pod \"4934a96a-3419-4623-9f64-58e8d9206864\" (UID: \"4934a96a-3419-4623-9f64-58e8d9206864\") " Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.046741 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4934a96a-3419-4623-9f64-58e8d9206864-kube-api-access-hsgf7" (OuterVolumeSpecName: "kube-api-access-hsgf7") pod "4934a96a-3419-4623-9f64-58e8d9206864" (UID: 
"4934a96a-3419-4623-9f64-58e8d9206864"). InnerVolumeSpecName "kube-api-access-hsgf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.084361 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4934a96a-3419-4623-9f64-58e8d9206864" (UID: "4934a96a-3419-4623-9f64-58e8d9206864"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.087696 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-config" (OuterVolumeSpecName: "config") pod "4934a96a-3419-4623-9f64-58e8d9206864" (UID: "4934a96a-3419-4623-9f64-58e8d9206864"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.090068 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4934a96a-3419-4623-9f64-58e8d9206864" (UID: "4934a96a-3419-4623-9f64-58e8d9206864"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.142829 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.142868 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.142878 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4934a96a-3419-4623-9f64-58e8d9206864-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.142888 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsgf7\" (UniqueName: \"kubernetes.io/projected/4934a96a-3419-4623-9f64-58e8d9206864-kube-api-access-hsgf7\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.206887 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-2sjp2"] Dec 10 07:08:00 crc kubenswrapper[4765]: E1210 07:08:00.207242 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fb93a5-4fe1-41ba-aed0-3f29d881fce5" containerName="mariadb-account-create-update" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.207257 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fb93a5-4fe1-41ba-aed0-3f29d881fce5" containerName="mariadb-account-create-update" Dec 10 07:08:00 crc kubenswrapper[4765]: E1210 07:08:00.207267 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="facc4775-9655-433e-90d8-9dbea8e3ac63" containerName="mariadb-database-create" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.207275 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="facc4775-9655-433e-90d8-9dbea8e3ac63" containerName="mariadb-database-create" Dec 10 07:08:00 crc kubenswrapper[4765]: E1210 07:08:00.207286 4765 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.207293 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" Dec 10 07:08:00 crc kubenswrapper[4765]: E1210 07:08:00.207304 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b145a008-b02d-4c5b-b426-5da68b2d2d09" containerName="swift-ring-rebalance" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.207310 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="b145a008-b02d-4c5b-b426-5da68b2d2d09" containerName="swift-ring-rebalance" Dec 10 07:08:00 crc kubenswrapper[4765]: E1210 07:08:00.207324 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="init" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.207330 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="init" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.207486 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="4934a96a-3419-4623-9f64-58e8d9206864" containerName="dnsmasq-dns" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.207511 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="b145a008-b02d-4c5b-b426-5da68b2d2d09" containerName="swift-ring-rebalance" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.207527 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="facc4775-9655-433e-90d8-9dbea8e3ac63" containerName="mariadb-database-create" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.207544 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4fb93a5-4fe1-41ba-aed0-3f29d881fce5" containerName="mariadb-account-create-update" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.208055 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.211554 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-g8f64" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.214181 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.217720 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-2sjp2"] Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.345336 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-combined-ca-bundle\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.345447 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-config-data\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.345491 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-db-sync-config-data\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.345581 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrpnd\" (UniqueName: \"kubernetes.io/projected/045989ab-ad14-4ec7-adda-fcb6054f8b6e-kube-api-access-nrpnd\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.446800 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-combined-ca-bundle\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.446878 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-config-data\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.446923 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-db-sync-config-data\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.446991 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrpnd\" (UniqueName: \"kubernetes.io/projected/045989ab-ad14-4ec7-adda-fcb6054f8b6e-kube-api-access-nrpnd\") pod 
\"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.450783 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-config-data\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.451413 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-db-sync-config-data\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.451608 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-combined-ca-bundle\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.467410 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrpnd\" (UniqueName: \"kubernetes.io/projected/045989ab-ad14-4ec7-adda-fcb6054f8b6e-kube-api-access-nrpnd\") pod \"glance-db-sync-2sjp2\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.535224 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.909064 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x" Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.932242 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x"] Dec 10 07:08:00 crc kubenswrapper[4765]: I1210 07:08:00.939127 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bbdc7ccd7-9qs4x"] Dec 10 07:08:01 crc kubenswrapper[4765]: I1210 07:08:01.048068 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-2sjp2"] Dec 10 07:08:01 crc kubenswrapper[4765]: I1210 07:08:01.918140 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2sjp2" event={"ID":"045989ab-ad14-4ec7-adda-fcb6054f8b6e","Type":"ContainerStarted","Data":"650fdede7f295938e384b88baea48852295696b05abebdbe37400303e37af16e"} Dec 10 07:08:02 crc kubenswrapper[4765]: I1210 07:08:02.600843 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4934a96a-3419-4623-9f64-58e8d9206864" path="/var/lib/kubelet/pods/4934a96a-3419-4623-9f64-58e8d9206864/volumes" Dec 10 07:08:04 crc kubenswrapper[4765]: I1210 07:08:04.049198 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:08:04 crc kubenswrapper[4765]: I1210 07:08:04.049273 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:08:04 crc kubenswrapper[4765]: I1210 07:08:04.049350 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 07:08:04 crc kubenswrapper[4765]: I1210 07:08:04.050273 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8f3d21f8c3fe011f6de37bb9b8fe365dd62e648f60edb80df7c37bb446ad83d1"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 07:08:04 crc kubenswrapper[4765]: I1210 07:08:04.050358 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://8f3d21f8c3fe011f6de37bb9b8fe365dd62e648f60edb80df7c37bb446ad83d1" gracePeriod=600 Dec 10 07:08:04 crc kubenswrapper[4765]: I1210 07:08:04.947199 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="8f3d21f8c3fe011f6de37bb9b8fe365dd62e648f60edb80df7c37bb446ad83d1" exitCode=0 Dec 10 07:08:04 crc kubenswrapper[4765]: I1210 07:08:04.947395 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"8f3d21f8c3fe011f6de37bb9b8fe365dd62e648f60edb80df7c37bb446ad83d1"} Dec 10 07:08:04 crc kubenswrapper[4765]: I1210 07:08:04.947581 4765 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"1c86e2badb0bfc9f0eb664aadfc0af5a709c20f5327fa62e0a9911a7da8c407c"} Dec 10 07:08:04 crc kubenswrapper[4765]: I1210 07:08:04.947611 4765 scope.go:117] "RemoveContainer" containerID="1a2948aa41622b94e272b106fd506ab6099b9c866ae8f86fff9e5dbf9e54046e" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.057532 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.104826 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.302185 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hxr5k-config-4r9d8"] Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.308161 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.314317 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.318191 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.318264 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run-ovn\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.318302 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-additional-scripts\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.318397 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-log-ovn\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.318439 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rczz\" (UniqueName: \"kubernetes.io/projected/65a0a6a3-114a-4280-bc80-695ad7222aba-kube-api-access-5rczz\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.318455 4765 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-scripts\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.320510 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxr5k-config-4r9d8"] Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.419655 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.420044 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run-ovn\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.420062 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.420141 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run-ovn\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.420254 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-additional-scripts\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.420679 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-log-ovn\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.420917 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-scripts\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.422815 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rczz\" (UniqueName: \"kubernetes.io/projected/65a0a6a3-114a-4280-bc80-695ad7222aba-kube-api-access-5rczz\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: 
\"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.421611 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-additional-scripts\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.422694 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-scripts\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.420830 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-log-ovn\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.441702 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rczz\" (UniqueName: \"kubernetes.io/projected/65a0a6a3-114a-4280-bc80-695ad7222aba-kube-api-access-5rczz\") pod \"ovn-controller-hxr5k-config-4r9d8\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:12 crc kubenswrapper[4765]: I1210 07:08:12.632021 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:13 crc kubenswrapper[4765]: I1210 07:08:13.017007 4765 generic.go:334] "Generic (PLEG): container finished" podID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" containerID="fc75f40345fbe86be4d1614b512064bb219a33382f202793ae741275bbb66a36" exitCode=0 Dec 10 07:08:13 crc kubenswrapper[4765]: I1210 07:08:13.017097 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"78b416b3-3796-4fa3-8a4f-7fa6107d98a1","Type":"ContainerDied","Data":"fc75f40345fbe86be4d1614b512064bb219a33382f202793ae741275bbb66a36"} Dec 10 07:08:13 crc kubenswrapper[4765]: I1210 07:08:13.321457 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxr5k-config-4r9d8"] Dec 10 07:08:14 crc kubenswrapper[4765]: I1210 07:08:14.026870 4765 generic.go:334] "Generic (PLEG): container finished" podID="65a0a6a3-114a-4280-bc80-695ad7222aba" containerID="55dfae8913f307f4ff17dd9b36b7ab88c420206a1add647e31c248ea0b272699" exitCode=0 Dec 10 07:08:14 crc kubenswrapper[4765]: I1210 07:08:14.026922 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k-config-4r9d8" event={"ID":"65a0a6a3-114a-4280-bc80-695ad7222aba","Type":"ContainerDied","Data":"55dfae8913f307f4ff17dd9b36b7ab88c420206a1add647e31c248ea0b272699"} Dec 10 07:08:14 crc kubenswrapper[4765]: I1210 07:08:14.027445 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k-config-4r9d8" event={"ID":"65a0a6a3-114a-4280-bc80-695ad7222aba","Type":"ContainerStarted","Data":"b78b37d0b6158745a16280d18220bd9ff7d902d70dfe40f37d35f432fd11153b"} Dec 10 07:08:14 crc kubenswrapper[4765]: I1210 07:08:14.029384 4765 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2sjp2" event={"ID":"045989ab-ad14-4ec7-adda-fcb6054f8b6e","Type":"ContainerStarted","Data":"ae0acddf4b81b4ef12582dae3c37d0db2b9a5b857d5bfe7687182a89daab11ff"} Dec 10 07:08:14 crc kubenswrapper[4765]: I1210 07:08:14.033339 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"78b416b3-3796-4fa3-8a4f-7fa6107d98a1","Type":"ContainerStarted","Data":"60f858d2d85b01e9da2a6a95ed2be831935e234ad3033222a4291f2052f6ce52"} Dec 10 07:08:14 crc kubenswrapper[4765]: I1210 07:08:14.033487 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 10 07:08:14 crc kubenswrapper[4765]: I1210 07:08:14.085656 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=97.863251722 podStartE2EDuration="2m29.085634822s" podCreationTimestamp="2025-12-10 07:05:45 +0000 UTC" firstStartedPulling="2025-12-10 07:05:47.152472505 +0000 UTC m=+1066.879137821" lastFinishedPulling="2025-12-10 07:06:38.374855605 +0000 UTC m=+1118.101520921" observedRunningTime="2025-12-10 07:08:14.076036869 +0000 UTC m=+1213.802702195" watchObservedRunningTime="2025-12-10 07:08:14.085634822 +0000 UTC m=+1213.812300138" Dec 10 07:08:14 crc kubenswrapper[4765]: I1210 07:08:14.106821 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-2sjp2" podStartSLOduration=2.18957085 podStartE2EDuration="14.106802782s" podCreationTimestamp="2025-12-10 07:08:00 +0000 UTC" firstStartedPulling="2025-12-10 07:08:01.054102442 +0000 UTC m=+1200.780767778" lastFinishedPulling="2025-12-10 07:08:12.971334394 +0000 UTC m=+1212.697999710" observedRunningTime="2025-12-10 07:08:14.093269538 +0000 UTC m=+1213.819934864" watchObservedRunningTime="2025-12-10 07:08:14.106802782 +0000 UTC m=+1213.833468098" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.377692 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.480177 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run\") pod \"65a0a6a3-114a-4280-bc80-695ad7222aba\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.480321 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run" (OuterVolumeSpecName: "var-run") pod "65a0a6a3-114a-4280-bc80-695ad7222aba" (UID: "65a0a6a3-114a-4280-bc80-695ad7222aba"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.480488 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-scripts\") pod \"65a0a6a3-114a-4280-bc80-695ad7222aba\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.480584 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rczz\" (UniqueName: \"kubernetes.io/projected/65a0a6a3-114a-4280-bc80-695ad7222aba-kube-api-access-5rczz\") pod \"65a0a6a3-114a-4280-bc80-695ad7222aba\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.480688 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-log-ovn\") pod \"65a0a6a3-114a-4280-bc80-695ad7222aba\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.480713 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run-ovn\") pod \"65a0a6a3-114a-4280-bc80-695ad7222aba\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.480794 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-additional-scripts\") pod \"65a0a6a3-114a-4280-bc80-695ad7222aba\" (UID: \"65a0a6a3-114a-4280-bc80-695ad7222aba\") " Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.480796 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "65a0a6a3-114a-4280-bc80-695ad7222aba" (UID: "65a0a6a3-114a-4280-bc80-695ad7222aba"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.480850 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "65a0a6a3-114a-4280-bc80-695ad7222aba" (UID: "65a0a6a3-114a-4280-bc80-695ad7222aba"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.481432 4765 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.481450 4765 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.481460 4765 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/65a0a6a3-114a-4280-bc80-695ad7222aba-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.481664 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "65a0a6a3-114a-4280-bc80-695ad7222aba" (UID: "65a0a6a3-114a-4280-bc80-695ad7222aba"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.482250 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-scripts" (OuterVolumeSpecName: "scripts") pod "65a0a6a3-114a-4280-bc80-695ad7222aba" (UID: "65a0a6a3-114a-4280-bc80-695ad7222aba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.500316 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65a0a6a3-114a-4280-bc80-695ad7222aba-kube-api-access-5rczz" (OuterVolumeSpecName: "kube-api-access-5rczz") pod "65a0a6a3-114a-4280-bc80-695ad7222aba" (UID: "65a0a6a3-114a-4280-bc80-695ad7222aba"). InnerVolumeSpecName "kube-api-access-5rczz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.583251 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.583301 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rczz\" (UniqueName: \"kubernetes.io/projected/65a0a6a3-114a-4280-bc80-695ad7222aba-kube-api-access-5rczz\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:15 crc kubenswrapper[4765]: I1210 07:08:15.583313 4765 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/65a0a6a3-114a-4280-bc80-695ad7222aba-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.051386 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k-config-4r9d8" event={"ID":"65a0a6a3-114a-4280-bc80-695ad7222aba","Type":"ContainerDied","Data":"b78b37d0b6158745a16280d18220bd9ff7d902d70dfe40f37d35f432fd11153b"} Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.051750 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b78b37d0b6158745a16280d18220bd9ff7d902d70dfe40f37d35f432fd11153b" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.051448 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxr5k-config-4r9d8" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.489136 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-hxr5k-config-4r9d8"] Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.495758 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-hxr5k-config-4r9d8"] Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.599145 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65a0a6a3-114a-4280-bc80-695ad7222aba" path="/var/lib/kubelet/pods/65a0a6a3-114a-4280-bc80-695ad7222aba/volumes" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.654352 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hxr5k-config-bnsjx"] Dec 10 07:08:16 crc kubenswrapper[4765]: E1210 07:08:16.655251 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65a0a6a3-114a-4280-bc80-695ad7222aba" containerName="ovn-config" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.655436 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="65a0a6a3-114a-4280-bc80-695ad7222aba" containerName="ovn-config" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.655661 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="65a0a6a3-114a-4280-bc80-695ad7222aba" containerName="ovn-config" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.656282 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.660000 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.680634 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxr5k-config-bnsjx"] Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.709904 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run-ovn\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.709958 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2ntp\" (UniqueName: \"kubernetes.io/projected/a5ba9db2-5261-4e5d-8304-038b907a953d-kube-api-access-n2ntp\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.709989 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-additional-scripts\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.710067 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-log-ovn\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.710238 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-scripts\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.710284 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.811285 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run-ovn\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.811334 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2ntp\" (UniqueName: 
\"kubernetes.io/projected/a5ba9db2-5261-4e5d-8304-038b907a953d-kube-api-access-n2ntp\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.811364 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-additional-scripts\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.811383 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-log-ovn\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.811430 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-scripts\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.811452 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.811724 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.811791 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-log-ovn\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.811968 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run-ovn\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.812524 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-additional-scripts\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.813885 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-scripts\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.834589 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2ntp\" (UniqueName: \"kubernetes.io/projected/a5ba9db2-5261-4e5d-8304-038b907a953d-kube-api-access-n2ntp\") pod \"ovn-controller-hxr5k-config-bnsjx\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.937453 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-hxr5k" Dec 10 07:08:16 crc kubenswrapper[4765]: I1210 07:08:16.973324 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:17 crc kubenswrapper[4765]: I1210 07:08:17.484182 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hxr5k-config-bnsjx"] Dec 10 07:08:18 crc kubenswrapper[4765]: I1210 07:08:18.075280 4765 generic.go:334] "Generic (PLEG): container finished" podID="a5ba9db2-5261-4e5d-8304-038b907a953d" containerID="8883731c27445d441db7a1d384e0ba69d165eb04c1a3efec00afc4870eeec3e5" exitCode=0 Dec 10 07:08:18 crc kubenswrapper[4765]: I1210 07:08:18.075396 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k-config-bnsjx" event={"ID":"a5ba9db2-5261-4e5d-8304-038b907a953d","Type":"ContainerDied","Data":"8883731c27445d441db7a1d384e0ba69d165eb04c1a3efec00afc4870eeec3e5"} Dec 10 07:08:18 crc kubenswrapper[4765]: I1210 07:08:18.075629 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k-config-bnsjx" event={"ID":"a5ba9db2-5261-4e5d-8304-038b907a953d","Type":"ContainerStarted","Data":"340ac04c8d46c3ffe41631341f1492827a42b1c10f928807d87420811756d9b5"} Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.377660 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.558854 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-scripts\") pod \"a5ba9db2-5261-4e5d-8304-038b907a953d\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.558947 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run\") pod \"a5ba9db2-5261-4e5d-8304-038b907a953d\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.558987 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-log-ovn\") pod \"a5ba9db2-5261-4e5d-8304-038b907a953d\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559017 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run-ovn\") pod \"a5ba9db2-5261-4e5d-8304-038b907a953d\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559040 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2ntp\" (UniqueName: \"kubernetes.io/projected/a5ba9db2-5261-4e5d-8304-038b907a953d-kube-api-access-n2ntp\") pod \"a5ba9db2-5261-4e5d-8304-038b907a953d\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559043 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run" (OuterVolumeSpecName: "var-run") pod "a5ba9db2-5261-4e5d-8304-038b907a953d" (UID: "a5ba9db2-5261-4e5d-8304-038b907a953d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559171 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "a5ba9db2-5261-4e5d-8304-038b907a953d" (UID: "a5ba9db2-5261-4e5d-8304-038b907a953d"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559171 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "a5ba9db2-5261-4e5d-8304-038b907a953d" (UID: "a5ba9db2-5261-4e5d-8304-038b907a953d"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559197 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-additional-scripts\") pod \"a5ba9db2-5261-4e5d-8304-038b907a953d\" (UID: \"a5ba9db2-5261-4e5d-8304-038b907a953d\") " Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559641 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "a5ba9db2-5261-4e5d-8304-038b907a953d" (UID: "a5ba9db2-5261-4e5d-8304-038b907a953d"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559734 4765 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559753 4765 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559771 4765 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a5ba9db2-5261-4e5d-8304-038b907a953d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.559919 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-scripts" (OuterVolumeSpecName: "scripts") pod "a5ba9db2-5261-4e5d-8304-038b907a953d" (UID: "a5ba9db2-5261-4e5d-8304-038b907a953d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.564744 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5ba9db2-5261-4e5d-8304-038b907a953d-kube-api-access-n2ntp" (OuterVolumeSpecName: "kube-api-access-n2ntp") pod "a5ba9db2-5261-4e5d-8304-038b907a953d" (UID: "a5ba9db2-5261-4e5d-8304-038b907a953d"). InnerVolumeSpecName "kube-api-access-n2ntp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.661612 4765 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.661653 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5ba9db2-5261-4e5d-8304-038b907a953d-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:19 crc kubenswrapper[4765]: I1210 07:08:19.661663 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2ntp\" (UniqueName: \"kubernetes.io/projected/a5ba9db2-5261-4e5d-8304-038b907a953d-kube-api-access-n2ntp\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:20 crc kubenswrapper[4765]: I1210 07:08:20.094141 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k-config-bnsjx" event={"ID":"a5ba9db2-5261-4e5d-8304-038b907a953d","Type":"ContainerDied","Data":"340ac04c8d46c3ffe41631341f1492827a42b1c10f928807d87420811756d9b5"} Dec 10 07:08:20 crc kubenswrapper[4765]: I1210 07:08:20.094188 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="340ac04c8d46c3ffe41631341f1492827a42b1c10f928807d87420811756d9b5" Dec 10 07:08:20 crc kubenswrapper[4765]: I1210 07:08:20.094217 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hxr5k-config-bnsjx" Dec 10 07:08:20 crc kubenswrapper[4765]: I1210 07:08:20.457552 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-hxr5k-config-bnsjx"] Dec 10 07:08:20 crc kubenswrapper[4765]: I1210 07:08:20.466355 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-hxr5k-config-bnsjx"] Dec 10 07:08:20 crc kubenswrapper[4765]: I1210 07:08:20.598802 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5ba9db2-5261-4e5d-8304-038b907a953d" path="/var/lib/kubelet/pods/a5ba9db2-5261-4e5d-8304-038b907a953d/volumes" Dec 10 07:08:21 crc kubenswrapper[4765]: I1210 07:08:21.102787 4765 generic.go:334] "Generic (PLEG): container finished" podID="045989ab-ad14-4ec7-adda-fcb6054f8b6e" containerID="ae0acddf4b81b4ef12582dae3c37d0db2b9a5b857d5bfe7687182a89daab11ff" exitCode=0 Dec 10 07:08:21 crc kubenswrapper[4765]: I1210 07:08:21.102818 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2sjp2" event={"ID":"045989ab-ad14-4ec7-adda-fcb6054f8b6e","Type":"ContainerDied","Data":"ae0acddf4b81b4ef12582dae3c37d0db2b9a5b857d5bfe7687182a89daab11ff"} Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.479395 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.511587 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-combined-ca-bundle\") pod \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.511664 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-config-data\") pod \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.511722 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-db-sync-config-data\") pod \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.511871 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrpnd\" (UniqueName: \"kubernetes.io/projected/045989ab-ad14-4ec7-adda-fcb6054f8b6e-kube-api-access-nrpnd\") pod \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\" (UID: \"045989ab-ad14-4ec7-adda-fcb6054f8b6e\") " Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.517768 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "045989ab-ad14-4ec7-adda-fcb6054f8b6e" (UID: "045989ab-ad14-4ec7-adda-fcb6054f8b6e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.517824 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/045989ab-ad14-4ec7-adda-fcb6054f8b6e-kube-api-access-nrpnd" (OuterVolumeSpecName: "kube-api-access-nrpnd") pod "045989ab-ad14-4ec7-adda-fcb6054f8b6e" (UID: "045989ab-ad14-4ec7-adda-fcb6054f8b6e"). InnerVolumeSpecName "kube-api-access-nrpnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.535047 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "045989ab-ad14-4ec7-adda-fcb6054f8b6e" (UID: "045989ab-ad14-4ec7-adda-fcb6054f8b6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.554767 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-config-data" (OuterVolumeSpecName: "config-data") pod "045989ab-ad14-4ec7-adda-fcb6054f8b6e" (UID: "045989ab-ad14-4ec7-adda-fcb6054f8b6e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.613546 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrpnd\" (UniqueName: \"kubernetes.io/projected/045989ab-ad14-4ec7-adda-fcb6054f8b6e-kube-api-access-nrpnd\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.613580 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.613589 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:22 crc kubenswrapper[4765]: I1210 07:08:22.613598 4765 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/045989ab-ad14-4ec7-adda-fcb6054f8b6e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.122944 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-2sjp2" event={"ID":"045989ab-ad14-4ec7-adda-fcb6054f8b6e","Type":"ContainerDied","Data":"650fdede7f295938e384b88baea48852295696b05abebdbe37400303e37af16e"} Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.122996 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="650fdede7f295938e384b88baea48852295696b05abebdbe37400303e37af16e" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.123007 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-2sjp2" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.540575 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79778dbd8c-9dw6f"] Dec 10 07:08:23 crc kubenswrapper[4765]: E1210 07:08:23.541021 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="045989ab-ad14-4ec7-adda-fcb6054f8b6e" containerName="glance-db-sync" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.541037 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="045989ab-ad14-4ec7-adda-fcb6054f8b6e" containerName="glance-db-sync" Dec 10 07:08:23 crc kubenswrapper[4765]: E1210 07:08:23.541067 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5ba9db2-5261-4e5d-8304-038b907a953d" containerName="ovn-config" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.541074 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5ba9db2-5261-4e5d-8304-038b907a953d" containerName="ovn-config" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.541318 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="045989ab-ad14-4ec7-adda-fcb6054f8b6e" containerName="glance-db-sync" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.541346 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5ba9db2-5261-4e5d-8304-038b907a953d" containerName="ovn-config" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.542496 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.551296 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79778dbd8c-9dw6f"] Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.631890 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-nb\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.631982 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-dns-svc\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.632075 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-config\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.632249 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-sb\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.632273 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl585\" (UniqueName: \"kubernetes.io/projected/c997c61a-e224-4bdf-b7ce-e1683985a3b2-kube-api-access-dl585\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.733560 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-config\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.733658 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-sb\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.733684 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl585\" (UniqueName: \"kubernetes.io/projected/c997c61a-e224-4bdf-b7ce-e1683985a3b2-kube-api-access-dl585\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.733709 4765 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-nb\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.733745 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-dns-svc\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.734831 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-sb\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.734869 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-dns-svc\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.735487 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-nb\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.736263 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-config\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.758211 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl585\" (UniqueName: \"kubernetes.io/projected/c997c61a-e224-4bdf-b7ce-e1683985a3b2-kube-api-access-dl585\") pod \"dnsmasq-dns-79778dbd8c-9dw6f\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:23 crc kubenswrapper[4765]: I1210 07:08:23.863384 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:24 crc kubenswrapper[4765]: I1210 07:08:24.373490 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79778dbd8c-9dw6f"] Dec 10 07:08:25 crc kubenswrapper[4765]: I1210 07:08:25.147133 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" event={"ID":"c997c61a-e224-4bdf-b7ce-e1683985a3b2","Type":"ContainerStarted","Data":"86b8a27c3026be68c6ef4b9da6e986b9946e12405d20b4931ababe7732fc9dc1"} Dec 10 07:08:26 crc kubenswrapper[4765]: I1210 07:08:26.159042 4765 generic.go:334] "Generic (PLEG): container finished" podID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerID="f860e2778014e1dafda7ef1de8ff21722953addca2ab6e1293384fc997df2228" exitCode=0 Dec 10 07:08:26 crc kubenswrapper[4765]: I1210 07:08:26.159136 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" event={"ID":"c997c61a-e224-4bdf-b7ce-e1683985a3b2","Type":"ContainerDied","Data":"f860e2778014e1dafda7ef1de8ff21722953addca2ab6e1293384fc997df2228"} Dec 10 07:08:26 crc kubenswrapper[4765]: I1210 07:08:26.587292 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 10 07:08:26 crc kubenswrapper[4765]: I1210 07:08:26.932663 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-8zzkf"] Dec 10 07:08:26 crc kubenswrapper[4765]: I1210 07:08:26.934417 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8zzkf" Dec 10 07:08:26 crc kubenswrapper[4765]: I1210 07:08:26.952911 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-8zzkf"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.009587 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-operator-scripts\") pod \"barbican-db-create-8zzkf\" (UID: \"d5a7a5c7-95da-49fe-ae5f-f3423f347fed\") " pod="openstack/barbican-db-create-8zzkf" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.009689 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtf7l\" (UniqueName: \"kubernetes.io/projected/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-kube-api-access-rtf7l\") pod \"barbican-db-create-8zzkf\" (UID: \"d5a7a5c7-95da-49fe-ae5f-f3423f347fed\") " pod="openstack/barbican-db-create-8zzkf" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.039698 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-8fh8t"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.041198 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-8fh8t" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.082338 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-8fh8t"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.113053 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-operator-scripts\") pod \"barbican-db-create-8zzkf\" (UID: \"d5a7a5c7-95da-49fe-ae5f-f3423f347fed\") " pod="openstack/barbican-db-create-8zzkf" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.113727 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtf7l\" (UniqueName: \"kubernetes.io/projected/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-kube-api-access-rtf7l\") pod \"barbican-db-create-8zzkf\" (UID: \"d5a7a5c7-95da-49fe-ae5f-f3423f347fed\") " pod="openstack/barbican-db-create-8zzkf" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.113858 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-operator-scripts\") pod \"barbican-db-create-8zzkf\" (UID: \"d5a7a5c7-95da-49fe-ae5f-f3423f347fed\") " pod="openstack/barbican-db-create-8zzkf" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.147205 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-1abb-account-create-update-2dn48"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.148290 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtf7l\" (UniqueName: \"kubernetes.io/projected/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-kube-api-access-rtf7l\") pod \"barbican-db-create-8zzkf\" (UID: \"d5a7a5c7-95da-49fe-ae5f-f3423f347fed\") " pod="openstack/barbican-db-create-8zzkf" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.148559 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1abb-account-create-update-2dn48" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.151832 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.154998 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1abb-account-create-update-2dn48"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.181119 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" event={"ID":"c997c61a-e224-4bdf-b7ce-e1683985a3b2","Type":"ContainerStarted","Data":"7142c04cc4e57c825e867cdb342a3a40fc9b114254c516d77632e8e8eeccce3b"} Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.182105 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.211480 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" podStartSLOduration=4.211464926 podStartE2EDuration="4.211464926s" podCreationTimestamp="2025-12-10 07:08:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:08:27.210511929 +0000 UTC m=+1226.937177245" watchObservedRunningTime="2025-12-10 07:08:27.211464926 +0000 UTC m=+1226.938130242" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.216045 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-operator-scripts\") pod \"cinder-db-create-8fh8t\" (UID: \"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786\") " pod="openstack/cinder-db-create-8fh8t" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.216126 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjrrq\" (UniqueName: \"kubernetes.io/projected/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-kube-api-access-wjrrq\") pod \"cinder-db-create-8fh8t\" (UID: \"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786\") " pod="openstack/cinder-db-create-8fh8t" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.241041 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-43b0-account-create-update-29n95"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.242977 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-43b0-account-create-update-29n95" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.246834 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.255783 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-8zzkf" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.261886 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-43b0-account-create-update-29n95"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.322951 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g9h7\" (UniqueName: \"kubernetes.io/projected/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-kube-api-access-8g9h7\") pod \"cinder-1abb-account-create-update-2dn48\" (UID: \"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9\") " pod="openstack/cinder-1abb-account-create-update-2dn48" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.323126 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-operator-scripts\") pod \"cinder-db-create-8fh8t\" (UID: \"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786\") " pod="openstack/cinder-db-create-8fh8t" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.323293 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-operator-scripts\") pod \"cinder-1abb-account-create-update-2dn48\" (UID: \"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9\") " pod="openstack/cinder-1abb-account-create-update-2dn48" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.323386 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjrrq\" (UniqueName: \"kubernetes.io/projected/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-kube-api-access-wjrrq\") pod \"cinder-db-create-8fh8t\" (UID: \"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786\") " pod="openstack/cinder-db-create-8fh8t" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.324006 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-operator-scripts\") pod \"cinder-db-create-8fh8t\" (UID: \"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786\") " pod="openstack/cinder-db-create-8fh8t" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.346138 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjrrq\" (UniqueName: \"kubernetes.io/projected/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-kube-api-access-wjrrq\") pod \"cinder-db-create-8fh8t\" (UID: \"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786\") " pod="openstack/cinder-db-create-8fh8t" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.396621 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-8fh8t" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.435004 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-nwv46"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.436103 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-nwv46" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.437265 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g9h7\" (UniqueName: \"kubernetes.io/projected/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-kube-api-access-8g9h7\") pod \"cinder-1abb-account-create-update-2dn48\" (UID: \"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9\") " pod="openstack/cinder-1abb-account-create-update-2dn48" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.437318 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13147cfa-7543-4a9b-a97b-8a83abf32e0b-operator-scripts\") pod \"barbican-43b0-account-create-update-29n95\" (UID: \"13147cfa-7543-4a9b-a97b-8a83abf32e0b\") " pod="openstack/barbican-43b0-account-create-update-29n95" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.437370 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kwnc\" (UniqueName: \"kubernetes.io/projected/13147cfa-7543-4a9b-a97b-8a83abf32e0b-kube-api-access-8kwnc\") pod \"barbican-43b0-account-create-update-29n95\" (UID: \"13147cfa-7543-4a9b-a97b-8a83abf32e0b\") " pod="openstack/barbican-43b0-account-create-update-29n95" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.437397 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-operator-scripts\") pod \"cinder-1abb-account-create-update-2dn48\" (UID: \"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9\") " pod="openstack/cinder-1abb-account-create-update-2dn48" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.442270 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-operator-scripts\") pod \"cinder-1abb-account-create-update-2dn48\" (UID: \"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9\") " pod="openstack/cinder-1abb-account-create-update-2dn48" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.456802 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-nwv46"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.500480 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g9h7\" (UniqueName: \"kubernetes.io/projected/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-kube-api-access-8g9h7\") pod \"cinder-1abb-account-create-update-2dn48\" (UID: \"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9\") " pod="openstack/cinder-1abb-account-create-update-2dn48" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.523581 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1abb-account-create-update-2dn48" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.534112 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-7wlqz"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.535621 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.537714 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.540462 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.540727 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.541004 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vn8ql" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.542284 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af358477-7483-4e80-b209-e7991328cbb1-operator-scripts\") pod \"neutron-db-create-nwv46\" (UID: \"af358477-7483-4e80-b209-e7991328cbb1\") " pod="openstack/neutron-db-create-nwv46" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.542469 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13147cfa-7543-4a9b-a97b-8a83abf32e0b-operator-scripts\") pod \"barbican-43b0-account-create-update-29n95\" (UID: \"13147cfa-7543-4a9b-a97b-8a83abf32e0b\") " pod="openstack/barbican-43b0-account-create-update-29n95" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.542519 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bm95f\" (UniqueName: \"kubernetes.io/projected/af358477-7483-4e80-b209-e7991328cbb1-kube-api-access-bm95f\") pod \"neutron-db-create-nwv46\" (UID: \"af358477-7483-4e80-b209-e7991328cbb1\") " pod="openstack/neutron-db-create-nwv46" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.542595 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kwnc\" (UniqueName: \"kubernetes.io/projected/13147cfa-7543-4a9b-a97b-8a83abf32e0b-kube-api-access-8kwnc\") pod \"barbican-43b0-account-create-update-29n95\" (UID: \"13147cfa-7543-4a9b-a97b-8a83abf32e0b\") " pod="openstack/barbican-43b0-account-create-update-29n95" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.544441 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13147cfa-7543-4a9b-a97b-8a83abf32e0b-operator-scripts\") pod \"barbican-43b0-account-create-update-29n95\" (UID: \"13147cfa-7543-4a9b-a97b-8a83abf32e0b\") " pod="openstack/barbican-43b0-account-create-update-29n95" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.583270 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-7wlqz"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.599797 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-3778-account-create-update-78648"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.605499 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3778-account-create-update-78648" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.608044 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.631578 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kwnc\" (UniqueName: \"kubernetes.io/projected/13147cfa-7543-4a9b-a97b-8a83abf32e0b-kube-api-access-8kwnc\") pod \"barbican-43b0-account-create-update-29n95\" (UID: \"13147cfa-7543-4a9b-a97b-8a83abf32e0b\") " pod="openstack/barbican-43b0-account-create-update-29n95" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.641160 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3778-account-create-update-78648"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.649364 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zd8d\" (UniqueName: \"kubernetes.io/projected/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-kube-api-access-9zd8d\") pod \"keystone-db-sync-7wlqz\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") " pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.649487 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bm95f\" (UniqueName: \"kubernetes.io/projected/af358477-7483-4e80-b209-e7991328cbb1-kube-api-access-bm95f\") pod \"neutron-db-create-nwv46\" (UID: \"af358477-7483-4e80-b209-e7991328cbb1\") " pod="openstack/neutron-db-create-nwv46" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.649591 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-combined-ca-bundle\") pod \"keystone-db-sync-7wlqz\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") " pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.649638 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af358477-7483-4e80-b209-e7991328cbb1-operator-scripts\") pod \"neutron-db-create-nwv46\" (UID: \"af358477-7483-4e80-b209-e7991328cbb1\") " pod="openstack/neutron-db-create-nwv46" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.649667 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-config-data\") pod \"keystone-db-sync-7wlqz\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") " pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.730366 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af358477-7483-4e80-b209-e7991328cbb1-operator-scripts\") pod \"neutron-db-create-nwv46\" (UID: \"af358477-7483-4e80-b209-e7991328cbb1\") " pod="openstack/neutron-db-create-nwv46" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.751920 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61dce5ef-c4c0-4880-ab5c-87391d092897-operator-scripts\") pod \"neutron-3778-account-create-update-78648\" (UID: 
\"61dce5ef-c4c0-4880-ab5c-87391d092897\") " pod="openstack/neutron-3778-account-create-update-78648" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.752358 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zd8d\" (UniqueName: \"kubernetes.io/projected/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-kube-api-access-9zd8d\") pod \"keystone-db-sync-7wlqz\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") " pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.752524 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcbgw\" (UniqueName: \"kubernetes.io/projected/61dce5ef-c4c0-4880-ab5c-87391d092897-kube-api-access-jcbgw\") pod \"neutron-3778-account-create-update-78648\" (UID: \"61dce5ef-c4c0-4880-ab5c-87391d092897\") " pod="openstack/neutron-3778-account-create-update-78648" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.752582 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-combined-ca-bundle\") pod \"keystone-db-sync-7wlqz\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") " pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.752613 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-config-data\") pod \"keystone-db-sync-7wlqz\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") " pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.761950 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bm95f\" (UniqueName: \"kubernetes.io/projected/af358477-7483-4e80-b209-e7991328cbb1-kube-api-access-bm95f\") pod \"neutron-db-create-nwv46\" (UID: \"af358477-7483-4e80-b209-e7991328cbb1\") " pod="openstack/neutron-db-create-nwv46" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.766252 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-combined-ca-bundle\") pod \"keystone-db-sync-7wlqz\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") " pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.775789 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-config-data\") pod \"keystone-db-sync-7wlqz\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") " pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.781686 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zd8d\" (UniqueName: \"kubernetes.io/projected/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-kube-api-access-9zd8d\") pod \"keystone-db-sync-7wlqz\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") " pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.802962 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-8zzkf"] Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.854272 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/61dce5ef-c4c0-4880-ab5c-87391d092897-operator-scripts\") pod \"neutron-3778-account-create-update-78648\" (UID: \"61dce5ef-c4c0-4880-ab5c-87391d092897\") " pod="openstack/neutron-3778-account-create-update-78648" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.854386 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcbgw\" (UniqueName: \"kubernetes.io/projected/61dce5ef-c4c0-4880-ab5c-87391d092897-kube-api-access-jcbgw\") pod \"neutron-3778-account-create-update-78648\" (UID: \"61dce5ef-c4c0-4880-ab5c-87391d092897\") " pod="openstack/neutron-3778-account-create-update-78648" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.856192 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61dce5ef-c4c0-4880-ab5c-87391d092897-operator-scripts\") pod \"neutron-3778-account-create-update-78648\" (UID: \"61dce5ef-c4c0-4880-ab5c-87391d092897\") " pod="openstack/neutron-3778-account-create-update-78648" Dec 10 07:08:27 crc kubenswrapper[4765]: I1210 07:08:27.863663 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-43b0-account-create-update-29n95" Dec 10 07:08:28 crc kubenswrapper[4765]: I1210 07:08:27.891014 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcbgw\" (UniqueName: \"kubernetes.io/projected/61dce5ef-c4c0-4880-ab5c-87391d092897-kube-api-access-jcbgw\") pod \"neutron-3778-account-create-update-78648\" (UID: \"61dce5ef-c4c0-4880-ab5c-87391d092897\") " pod="openstack/neutron-3778-account-create-update-78648" Dec 10 07:08:28 crc kubenswrapper[4765]: I1210 07:08:27.916681 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-nwv46" Dec 10 07:08:28 crc kubenswrapper[4765]: I1210 07:08:27.938733 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-7wlqz" Dec 10 07:08:28 crc kubenswrapper[4765]: I1210 07:08:28.072460 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3778-account-create-update-78648" Dec 10 07:08:28 crc kubenswrapper[4765]: I1210 07:08:28.215441 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8zzkf" event={"ID":"d5a7a5c7-95da-49fe-ae5f-f3423f347fed","Type":"ContainerStarted","Data":"eeb9c53b18d871f227280cbde0cecce096816d3dd73dfa02c863075aedd774ab"} Dec 10 07:08:28 crc kubenswrapper[4765]: I1210 07:08:28.374350 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-8fh8t"] Dec 10 07:08:29 crc kubenswrapper[4765]: I1210 07:08:29.265121 4765 generic.go:334] "Generic (PLEG): container finished" podID="a5a49f7e-a63c-4d23-ad8d-bc876ddf5786" containerID="263abf314f478f45b775bde64ca3c733619ab1cf63ef4298b6c94c3c370e9f3f" exitCode=0 Dec 10 07:08:29 crc kubenswrapper[4765]: I1210 07:08:29.265521 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8fh8t" event={"ID":"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786","Type":"ContainerDied","Data":"263abf314f478f45b775bde64ca3c733619ab1cf63ef4298b6c94c3c370e9f3f"} Dec 10 07:08:29 crc kubenswrapper[4765]: I1210 07:08:29.265549 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8fh8t" event={"ID":"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786","Type":"ContainerStarted","Data":"6c2f6abe591d1b415a054ce8f0bd2e0c480bbdb4d533262dbe2b79c0bcbc8943"} Dec 10 07:08:29 crc kubenswrapper[4765]: I1210 07:08:29.277514 4765 generic.go:334] "Generic (PLEG): container finished" podID="d5a7a5c7-95da-49fe-ae5f-f3423f347fed" containerID="ebee5b572293df3bad7525159545677a6dfe7871f82acb881e53bcdf150ed6ff" exitCode=0 Dec 10 07:08:29 crc kubenswrapper[4765]: I1210 07:08:29.278235 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8zzkf" event={"ID":"d5a7a5c7-95da-49fe-ae5f-f3423f347fed","Type":"ContainerDied","Data":"ebee5b572293df3bad7525159545677a6dfe7871f82acb881e53bcdf150ed6ff"} Dec 10 07:08:29 crc kubenswrapper[4765]: I1210 07:08:29.290768 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-7wlqz"] Dec 10 07:08:29 crc kubenswrapper[4765]: I1210 07:08:29.404433 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1abb-account-create-update-2dn48"] Dec 10 07:08:29 crc kubenswrapper[4765]: I1210 07:08:29.430065 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-43b0-account-create-update-29n95"] Dec 10 07:08:29 crc kubenswrapper[4765]: W1210 07:08:29.435647 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13147cfa_7543_4a9b_a97b_8a83abf32e0b.slice/crio-da54d40c45658f575511aa674376ec5ef2205b899c641533cddc313c7cb35e13 WatchSource:0}: Error finding container da54d40c45658f575511aa674376ec5ef2205b899c641533cddc313c7cb35e13: Status 404 returned error can't find the container with id da54d40c45658f575511aa674376ec5ef2205b899c641533cddc313c7cb35e13 Dec 10 07:08:29 crc kubenswrapper[4765]: I1210 07:08:29.457205 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3778-account-create-update-78648"] Dec 10 07:08:29 crc kubenswrapper[4765]: W1210 07:08:29.462462 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf358477_7483_4e80_b209_e7991328cbb1.slice/crio-83ba995ec29e870d56f8971fc166b093d2d0751fc316e670db9e327edad07c61 WatchSource:0}: Error finding 
container 83ba995ec29e870d56f8971fc166b093d2d0751fc316e670db9e327edad07c61: Status 404 returned error can't find the container with id 83ba995ec29e870d56f8971fc166b093d2d0751fc316e670db9e327edad07c61 Dec 10 07:08:29 crc kubenswrapper[4765]: I1210 07:08:29.468474 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-nwv46"] Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.293147 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7wlqz" event={"ID":"054ef117-bca3-4fe9-aeb9-caf1deba8d5e","Type":"ContainerStarted","Data":"a91cdb85591c3e83324f3b3fba1232210b9a874d590d53dacde39f7d5ecee479"} Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.295279 4765 generic.go:334] "Generic (PLEG): container finished" podID="af358477-7483-4e80-b209-e7991328cbb1" containerID="eb20cff81c1acd614b62ed6f6349b3d083cca964a8dbeea2d401c130ef2da951" exitCode=0 Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.295378 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-nwv46" event={"ID":"af358477-7483-4e80-b209-e7991328cbb1","Type":"ContainerDied","Data":"eb20cff81c1acd614b62ed6f6349b3d083cca964a8dbeea2d401c130ef2da951"} Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.295422 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-nwv46" event={"ID":"af358477-7483-4e80-b209-e7991328cbb1","Type":"ContainerStarted","Data":"83ba995ec29e870d56f8971fc166b093d2d0751fc316e670db9e327edad07c61"} Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.297354 4765 generic.go:334] "Generic (PLEG): container finished" podID="61dce5ef-c4c0-4880-ab5c-87391d092897" containerID="bac4c71f55ee2170776df4ad5fb3c4f988f9f3170c3b9b43cdcdd7750110de7c" exitCode=0 Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.297461 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3778-account-create-update-78648" event={"ID":"61dce5ef-c4c0-4880-ab5c-87391d092897","Type":"ContainerDied","Data":"bac4c71f55ee2170776df4ad5fb3c4f988f9f3170c3b9b43cdcdd7750110de7c"} Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.297506 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3778-account-create-update-78648" event={"ID":"61dce5ef-c4c0-4880-ab5c-87391d092897","Type":"ContainerStarted","Data":"19fd6d5d4550a04bc398a970102470af0febbf2c310da76e0715cce4bceab706"} Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.300390 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1abb-account-create-update-2dn48" event={"ID":"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9","Type":"ContainerDied","Data":"832d47af8585309a92afe68f7c5380a57965390e6c2e78285a08914067a956d5"} Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.299831 4765 generic.go:334] "Generic (PLEG): container finished" podID="c7783f54-a81e-4b16-b37e-fa6d74c0d6f9" containerID="832d47af8585309a92afe68f7c5380a57965390e6c2e78285a08914067a956d5" exitCode=0 Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.302533 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1abb-account-create-update-2dn48" event={"ID":"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9","Type":"ContainerStarted","Data":"fdb238944a53878530072de5108d58be00989ba03acfdccb56ddf9e1adfe06fc"} Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.308871 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-43b0-account-create-update-29n95" 
event={"ID":"13147cfa-7543-4a9b-a97b-8a83abf32e0b","Type":"ContainerStarted","Data":"536abbad4e2842f7a03f4571a7f1c4dd328f7409b95bd63648e17108f89b6c6c"} Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.308957 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-43b0-account-create-update-29n95" event={"ID":"13147cfa-7543-4a9b-a97b-8a83abf32e0b","Type":"ContainerStarted","Data":"da54d40c45658f575511aa674376ec5ef2205b899c641533cddc313c7cb35e13"} Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.961984 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8zzkf" Dec 10 07:08:30 crc kubenswrapper[4765]: I1210 07:08:30.970319 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-8fh8t" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.080187 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtf7l\" (UniqueName: \"kubernetes.io/projected/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-kube-api-access-rtf7l\") pod \"d5a7a5c7-95da-49fe-ae5f-f3423f347fed\" (UID: \"d5a7a5c7-95da-49fe-ae5f-f3423f347fed\") " Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.080247 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjrrq\" (UniqueName: \"kubernetes.io/projected/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-kube-api-access-wjrrq\") pod \"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786\" (UID: \"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786\") " Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.080872 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-operator-scripts\") pod \"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786\" (UID: \"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786\") " Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.081146 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-operator-scripts\") pod \"d5a7a5c7-95da-49fe-ae5f-f3423f347fed\" (UID: \"d5a7a5c7-95da-49fe-ae5f-f3423f347fed\") " Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.081787 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a5a49f7e-a63c-4d23-ad8d-bc876ddf5786" (UID: "a5a49f7e-a63c-4d23-ad8d-bc876ddf5786"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.081892 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d5a7a5c7-95da-49fe-ae5f-f3423f347fed" (UID: "d5a7a5c7-95da-49fe-ae5f-f3423f347fed"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.083338 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.083364 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.102191 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-kube-api-access-rtf7l" (OuterVolumeSpecName: "kube-api-access-rtf7l") pod "d5a7a5c7-95da-49fe-ae5f-f3423f347fed" (UID: "d5a7a5c7-95da-49fe-ae5f-f3423f347fed"). InnerVolumeSpecName "kube-api-access-rtf7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.102286 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-kube-api-access-wjrrq" (OuterVolumeSpecName: "kube-api-access-wjrrq") pod "a5a49f7e-a63c-4d23-ad8d-bc876ddf5786" (UID: "a5a49f7e-a63c-4d23-ad8d-bc876ddf5786"). InnerVolumeSpecName "kube-api-access-wjrrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.184654 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtf7l\" (UniqueName: \"kubernetes.io/projected/d5a7a5c7-95da-49fe-ae5f-f3423f347fed-kube-api-access-rtf7l\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.184693 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjrrq\" (UniqueName: \"kubernetes.io/projected/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786-kube-api-access-wjrrq\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.319867 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-8fh8t" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.319854 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-8fh8t" event={"ID":"a5a49f7e-a63c-4d23-ad8d-bc876ddf5786","Type":"ContainerDied","Data":"6c2f6abe591d1b415a054ce8f0bd2e0c480bbdb4d533262dbe2b79c0bcbc8943"} Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.320006 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c2f6abe591d1b415a054ce8f0bd2e0c480bbdb4d533262dbe2b79c0bcbc8943" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.326267 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8zzkf" event={"ID":"d5a7a5c7-95da-49fe-ae5f-f3423f347fed","Type":"ContainerDied","Data":"eeb9c53b18d871f227280cbde0cecce096816d3dd73dfa02c863075aedd774ab"} Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.326312 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eeb9c53b18d871f227280cbde0cecce096816d3dd73dfa02c863075aedd774ab" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.326280 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-8zzkf" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.327782 4765 generic.go:334] "Generic (PLEG): container finished" podID="13147cfa-7543-4a9b-a97b-8a83abf32e0b" containerID="536abbad4e2842f7a03f4571a7f1c4dd328f7409b95bd63648e17108f89b6c6c" exitCode=0 Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.328019 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-43b0-account-create-update-29n95" event={"ID":"13147cfa-7543-4a9b-a97b-8a83abf32e0b","Type":"ContainerDied","Data":"536abbad4e2842f7a03f4571a7f1c4dd328f7409b95bd63648e17108f89b6c6c"} Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.749304 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1abb-account-create-update-2dn48" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.796592 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-operator-scripts\") pod \"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9\" (UID: \"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9\") " Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.796800 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8g9h7\" (UniqueName: \"kubernetes.io/projected/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-kube-api-access-8g9h7\") pod \"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9\" (UID: \"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9\") " Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.798266 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c7783f54-a81e-4b16-b37e-fa6d74c0d6f9" (UID: "c7783f54-a81e-4b16-b37e-fa6d74c0d6f9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.807048 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-kube-api-access-8g9h7" (OuterVolumeSpecName: "kube-api-access-8g9h7") pod "c7783f54-a81e-4b16-b37e-fa6d74c0d6f9" (UID: "c7783f54-a81e-4b16-b37e-fa6d74c0d6f9"). InnerVolumeSpecName "kube-api-access-8g9h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.898595 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:31 crc kubenswrapper[4765]: I1210 07:08:31.898898 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8g9h7\" (UniqueName: \"kubernetes.io/projected/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9-kube-api-access-8g9h7\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.211302 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-nwv46" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.244287 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-43b0-account-create-update-29n95" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.264434 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3778-account-create-update-78648" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.311618 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af358477-7483-4e80-b209-e7991328cbb1-operator-scripts\") pod \"af358477-7483-4e80-b209-e7991328cbb1\" (UID: \"af358477-7483-4e80-b209-e7991328cbb1\") " Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.311806 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61dce5ef-c4c0-4880-ab5c-87391d092897-operator-scripts\") pod \"61dce5ef-c4c0-4880-ab5c-87391d092897\" (UID: \"61dce5ef-c4c0-4880-ab5c-87391d092897\") " Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.311914 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bm95f\" (UniqueName: \"kubernetes.io/projected/af358477-7483-4e80-b209-e7991328cbb1-kube-api-access-bm95f\") pod \"af358477-7483-4e80-b209-e7991328cbb1\" (UID: \"af358477-7483-4e80-b209-e7991328cbb1\") " Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.311958 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kwnc\" (UniqueName: \"kubernetes.io/projected/13147cfa-7543-4a9b-a97b-8a83abf32e0b-kube-api-access-8kwnc\") pod \"13147cfa-7543-4a9b-a97b-8a83abf32e0b\" (UID: \"13147cfa-7543-4a9b-a97b-8a83abf32e0b\") " Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.312191 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13147cfa-7543-4a9b-a97b-8a83abf32e0b-operator-scripts\") pod \"13147cfa-7543-4a9b-a97b-8a83abf32e0b\" (UID: \"13147cfa-7543-4a9b-a97b-8a83abf32e0b\") " Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.312394 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af358477-7483-4e80-b209-e7991328cbb1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "af358477-7483-4e80-b209-e7991328cbb1" (UID: "af358477-7483-4e80-b209-e7991328cbb1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.312760 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61dce5ef-c4c0-4880-ab5c-87391d092897-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "61dce5ef-c4c0-4880-ab5c-87391d092897" (UID: "61dce5ef-c4c0-4880-ab5c-87391d092897"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.312997 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13147cfa-7543-4a9b-a97b-8a83abf32e0b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "13147cfa-7543-4a9b-a97b-8a83abf32e0b" (UID: "13147cfa-7543-4a9b-a97b-8a83abf32e0b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.313255 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcbgw\" (UniqueName: \"kubernetes.io/projected/61dce5ef-c4c0-4880-ab5c-87391d092897-kube-api-access-jcbgw\") pod \"61dce5ef-c4c0-4880-ab5c-87391d092897\" (UID: \"61dce5ef-c4c0-4880-ab5c-87391d092897\") " Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.314790 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af358477-7483-4e80-b209-e7991328cbb1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.314814 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61dce5ef-c4c0-4880-ab5c-87391d092897-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.314828 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13147cfa-7543-4a9b-a97b-8a83abf32e0b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.319331 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13147cfa-7543-4a9b-a97b-8a83abf32e0b-kube-api-access-8kwnc" (OuterVolumeSpecName: "kube-api-access-8kwnc") pod "13147cfa-7543-4a9b-a97b-8a83abf32e0b" (UID: "13147cfa-7543-4a9b-a97b-8a83abf32e0b"). InnerVolumeSpecName "kube-api-access-8kwnc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.320761 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af358477-7483-4e80-b209-e7991328cbb1-kube-api-access-bm95f" (OuterVolumeSpecName: "kube-api-access-bm95f") pod "af358477-7483-4e80-b209-e7991328cbb1" (UID: "af358477-7483-4e80-b209-e7991328cbb1"). InnerVolumeSpecName "kube-api-access-bm95f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.340357 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61dce5ef-c4c0-4880-ab5c-87391d092897-kube-api-access-jcbgw" (OuterVolumeSpecName: "kube-api-access-jcbgw") pod "61dce5ef-c4c0-4880-ab5c-87391d092897" (UID: "61dce5ef-c4c0-4880-ab5c-87391d092897"). InnerVolumeSpecName "kube-api-access-jcbgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.342305 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-nwv46" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.342570 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-nwv46" event={"ID":"af358477-7483-4e80-b209-e7991328cbb1","Type":"ContainerDied","Data":"83ba995ec29e870d56f8971fc166b093d2d0751fc316e670db9e327edad07c61"} Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.342642 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83ba995ec29e870d56f8971fc166b093d2d0751fc316e670db9e327edad07c61" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.346168 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3778-account-create-update-78648" event={"ID":"61dce5ef-c4c0-4880-ab5c-87391d092897","Type":"ContainerDied","Data":"19fd6d5d4550a04bc398a970102470af0febbf2c310da76e0715cce4bceab706"} Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.346221 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19fd6d5d4550a04bc398a970102470af0febbf2c310da76e0715cce4bceab706" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.346190 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3778-account-create-update-78648" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.364394 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1abb-account-create-update-2dn48" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.364638 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1abb-account-create-update-2dn48" event={"ID":"c7783f54-a81e-4b16-b37e-fa6d74c0d6f9","Type":"ContainerDied","Data":"fdb238944a53878530072de5108d58be00989ba03acfdccb56ddf9e1adfe06fc"} Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.364678 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdb238944a53878530072de5108d58be00989ba03acfdccb56ddf9e1adfe06fc" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.369016 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-43b0-account-create-update-29n95" event={"ID":"13147cfa-7543-4a9b-a97b-8a83abf32e0b","Type":"ContainerDied","Data":"da54d40c45658f575511aa674376ec5ef2205b899c641533cddc313c7cb35e13"} Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.369052 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da54d40c45658f575511aa674376ec5ef2205b899c641533cddc313c7cb35e13" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.369129 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-43b0-account-create-update-29n95" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.415882 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcbgw\" (UniqueName: \"kubernetes.io/projected/61dce5ef-c4c0-4880-ab5c-87391d092897-kube-api-access-jcbgw\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.415904 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bm95f\" (UniqueName: \"kubernetes.io/projected/af358477-7483-4e80-b209-e7991328cbb1-kube-api-access-bm95f\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:32 crc kubenswrapper[4765]: I1210 07:08:32.415916 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kwnc\" (UniqueName: \"kubernetes.io/projected/13147cfa-7543-4a9b-a97b-8a83abf32e0b-kube-api-access-8kwnc\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:33 crc kubenswrapper[4765]: I1210 07:08:33.865967 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:08:33 crc kubenswrapper[4765]: I1210 07:08:33.937508 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-8dhgg"] Dec 10 07:08:33 crc kubenswrapper[4765]: I1210 07:08:33.937772 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" podUID="e849d8f0-dbfc-4708-8c74-be5f6a594304" containerName="dnsmasq-dns" containerID="cri-o://872d3c1338bf9552ba1b0ae26f5b306fd06424196f95fee421e4bc0c8dcbdf54" gracePeriod=10 Dec 10 07:08:34 crc kubenswrapper[4765]: I1210 07:08:34.390692 4765 generic.go:334] "Generic (PLEG): container finished" podID="e849d8f0-dbfc-4708-8c74-be5f6a594304" containerID="872d3c1338bf9552ba1b0ae26f5b306fd06424196f95fee421e4bc0c8dcbdf54" exitCode=0 Dec 10 07:08:34 crc kubenswrapper[4765]: I1210 07:08:34.390755 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" event={"ID":"e849d8f0-dbfc-4708-8c74-be5f6a594304","Type":"ContainerDied","Data":"872d3c1338bf9552ba1b0ae26f5b306fd06424196f95fee421e4bc0c8dcbdf54"} Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.381677 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.427966 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" event={"ID":"e849d8f0-dbfc-4708-8c74-be5f6a594304","Type":"ContainerDied","Data":"de52b13cfa1f01d053f8072829326b2c4aa1ee4e5074440efa4f59bf8e342fbc"} Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.428018 4765 scope.go:117] "RemoveContainer" containerID="872d3c1338bf9552ba1b0ae26f5b306fd06424196f95fee421e4bc0c8dcbdf54" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.428159 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cb545bd4c-8dhgg" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.501573 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2wdd\" (UniqueName: \"kubernetes.io/projected/e849d8f0-dbfc-4708-8c74-be5f6a594304-kube-api-access-s2wdd\") pod \"e849d8f0-dbfc-4708-8c74-be5f6a594304\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.501652 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-nb\") pod \"e849d8f0-dbfc-4708-8c74-be5f6a594304\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.501702 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-config\") pod \"e849d8f0-dbfc-4708-8c74-be5f6a594304\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.501748 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-sb\") pod \"e849d8f0-dbfc-4708-8c74-be5f6a594304\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.501854 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-dns-svc\") pod \"e849d8f0-dbfc-4708-8c74-be5f6a594304\" (UID: \"e849d8f0-dbfc-4708-8c74-be5f6a594304\") " Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.506387 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e849d8f0-dbfc-4708-8c74-be5f6a594304-kube-api-access-s2wdd" (OuterVolumeSpecName: "kube-api-access-s2wdd") pod "e849d8f0-dbfc-4708-8c74-be5f6a594304" (UID: "e849d8f0-dbfc-4708-8c74-be5f6a594304"). InnerVolumeSpecName "kube-api-access-s2wdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.522373 4765 scope.go:117] "RemoveContainer" containerID="8f77e877be4ae67a1e616cfbc9d4ea6a3bf230029111adcee5a6ed06371acc89" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.546934 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e849d8f0-dbfc-4708-8c74-be5f6a594304" (UID: "e849d8f0-dbfc-4708-8c74-be5f6a594304"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.555269 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-config" (OuterVolumeSpecName: "config") pod "e849d8f0-dbfc-4708-8c74-be5f6a594304" (UID: "e849d8f0-dbfc-4708-8c74-be5f6a594304"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.559674 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e849d8f0-dbfc-4708-8c74-be5f6a594304" (UID: "e849d8f0-dbfc-4708-8c74-be5f6a594304"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.568834 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e849d8f0-dbfc-4708-8c74-be5f6a594304" (UID: "e849d8f0-dbfc-4708-8c74-be5f6a594304"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.605206 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.605645 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2wdd\" (UniqueName: \"kubernetes.io/projected/e849d8f0-dbfc-4708-8c74-be5f6a594304-kube-api-access-s2wdd\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.605658 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.605670 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.605683 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e849d8f0-dbfc-4708-8c74-be5f6a594304-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.754500 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-8dhgg"] Dec 10 07:08:36 crc kubenswrapper[4765]: I1210 07:08:36.761153 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-8dhgg"] Dec 10 07:08:37 crc kubenswrapper[4765]: I1210 07:08:37.437913 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7wlqz" event={"ID":"054ef117-bca3-4fe9-aeb9-caf1deba8d5e","Type":"ContainerStarted","Data":"985c248c95292c43eae0dc35b6f8e1697298363ff6eac21ef9a3b020a947a8f6"} Dec 10 07:08:37 crc kubenswrapper[4765]: I1210 07:08:37.458652 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-7wlqz" podStartSLOduration=3.339463529 podStartE2EDuration="10.458635455s" podCreationTimestamp="2025-12-10 07:08:27 +0000 UTC" firstStartedPulling="2025-12-10 07:08:29.293160751 +0000 UTC m=+1229.019826067" lastFinishedPulling="2025-12-10 07:08:36.412332677 +0000 UTC m=+1236.138997993" observedRunningTime="2025-12-10 07:08:37.455071023 +0000 UTC m=+1237.181736339" watchObservedRunningTime="2025-12-10 07:08:37.458635455 +0000 UTC m=+1237.185300761" Dec 10 07:08:38 crc kubenswrapper[4765]: I1210 
Dec 10 07:08:38 crc kubenswrapper[4765]: I1210 07:08:38.601502 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e849d8f0-dbfc-4708-8c74-be5f6a594304" path="/var/lib/kubelet/pods/e849d8f0-dbfc-4708-8c74-be5f6a594304/volumes"
Dec 10 07:08:42 crc kubenswrapper[4765]: I1210 07:08:42.486020 4765 generic.go:334] "Generic (PLEG): container finished" podID="054ef117-bca3-4fe9-aeb9-caf1deba8d5e" containerID="985c248c95292c43eae0dc35b6f8e1697298363ff6eac21ef9a3b020a947a8f6" exitCode=0
Dec 10 07:08:42 crc kubenswrapper[4765]: I1210 07:08:42.486127 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7wlqz" event={"ID":"054ef117-bca3-4fe9-aeb9-caf1deba8d5e","Type":"ContainerDied","Data":"985c248c95292c43eae0dc35b6f8e1697298363ff6eac21ef9a3b020a947a8f6"}
Dec 10 07:08:43 crc kubenswrapper[4765]: I1210 07:08:43.917792 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-7wlqz"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.080894 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-config-data\") pod \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") "
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.080995 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zd8d\" (UniqueName: \"kubernetes.io/projected/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-kube-api-access-9zd8d\") pod \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") "
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.081068 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-combined-ca-bundle\") pod \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\" (UID: \"054ef117-bca3-4fe9-aeb9-caf1deba8d5e\") "
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.087715 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-kube-api-access-9zd8d" (OuterVolumeSpecName: "kube-api-access-9zd8d") pod "054ef117-bca3-4fe9-aeb9-caf1deba8d5e" (UID: "054ef117-bca3-4fe9-aeb9-caf1deba8d5e"). InnerVolumeSpecName "kube-api-access-9zd8d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.109852 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "054ef117-bca3-4fe9-aeb9-caf1deba8d5e" (UID: "054ef117-bca3-4fe9-aeb9-caf1deba8d5e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.131777 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-config-data" (OuterVolumeSpecName: "config-data") pod "054ef117-bca3-4fe9-aeb9-caf1deba8d5e" (UID: "054ef117-bca3-4fe9-aeb9-caf1deba8d5e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.183967 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.184025 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zd8d\" (UniqueName: \"kubernetes.io/projected/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-kube-api-access-9zd8d\") on node \"crc\" DevicePath \"\""
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.184043 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/054ef117-bca3-4fe9-aeb9-caf1deba8d5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.504491 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-7wlqz"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.504385 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-7wlqz" event={"ID":"054ef117-bca3-4fe9-aeb9-caf1deba8d5e","Type":"ContainerDied","Data":"a91cdb85591c3e83324f3b3fba1232210b9a874d590d53dacde39f7d5ecee479"}
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.505202 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a91cdb85591c3e83324f3b3fba1232210b9a874d590d53dacde39f7d5ecee479"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.776315 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"]
Dec 10 07:08:44 crc kubenswrapper[4765]: E1210 07:08:44.776999 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e849d8f0-dbfc-4708-8c74-be5f6a594304" containerName="dnsmasq-dns"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.777068 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e849d8f0-dbfc-4708-8c74-be5f6a594304" containerName="dnsmasq-dns"
Dec 10 07:08:44 crc kubenswrapper[4765]: E1210 07:08:44.777142 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a49f7e-a63c-4d23-ad8d-bc876ddf5786" containerName="mariadb-database-create"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.777209 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a49f7e-a63c-4d23-ad8d-bc876ddf5786" containerName="mariadb-database-create"
Dec 10 07:08:44 crc kubenswrapper[4765]: E1210 07:08:44.777260 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e849d8f0-dbfc-4708-8c74-be5f6a594304" containerName="init"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.777314 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e849d8f0-dbfc-4708-8c74-be5f6a594304" containerName="init"
Dec 10 07:08:44 crc kubenswrapper[4765]: E1210 07:08:44.777359 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5a7a5c7-95da-49fe-ae5f-f3423f347fed" containerName="mariadb-database-create"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.777412 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5a7a5c7-95da-49fe-ae5f-f3423f347fed" containerName="mariadb-database-create"
Dec 10 07:08:44 crc kubenswrapper[4765]: E1210 07:08:44.777467 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="054ef117-bca3-4fe9-aeb9-caf1deba8d5e" containerName="keystone-db-sync"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.777517 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="054ef117-bca3-4fe9-aeb9-caf1deba8d5e" containerName="keystone-db-sync"
Dec 10 07:08:44 crc kubenswrapper[4765]: E1210 07:08:44.777569 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af358477-7483-4e80-b209-e7991328cbb1" containerName="mariadb-database-create"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.777614 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="af358477-7483-4e80-b209-e7991328cbb1" containerName="mariadb-database-create"
Dec 10 07:08:44 crc kubenswrapper[4765]: E1210 07:08:44.777659 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61dce5ef-c4c0-4880-ab5c-87391d092897" containerName="mariadb-account-create-update"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.777710 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="61dce5ef-c4c0-4880-ab5c-87391d092897" containerName="mariadb-account-create-update"
Dec 10 07:08:44 crc kubenswrapper[4765]: E1210 07:08:44.777757 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7783f54-a81e-4b16-b37e-fa6d74c0d6f9" containerName="mariadb-account-create-update"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.777831 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7783f54-a81e-4b16-b37e-fa6d74c0d6f9" containerName="mariadb-account-create-update"
Dec 10 07:08:44 crc kubenswrapper[4765]: E1210 07:08:44.777893 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13147cfa-7543-4a9b-a97b-8a83abf32e0b" containerName="mariadb-account-create-update"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.777942 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="13147cfa-7543-4a9b-a97b-8a83abf32e0b" containerName="mariadb-account-create-update"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.778173 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7783f54-a81e-4b16-b37e-fa6d74c0d6f9" containerName="mariadb-account-create-update"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.778426 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5a7a5c7-95da-49fe-ae5f-f3423f347fed" containerName="mariadb-database-create"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.778487 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="054ef117-bca3-4fe9-aeb9-caf1deba8d5e" containerName="keystone-db-sync"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.778581 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="61dce5ef-c4c0-4880-ab5c-87391d092897" containerName="mariadb-account-create-update"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.778641 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e849d8f0-dbfc-4708-8c74-be5f6a594304" containerName="dnsmasq-dns"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.778702 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5a49f7e-a63c-4d23-ad8d-bc876ddf5786" containerName="mariadb-database-create"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.778761 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="13147cfa-7543-4a9b-a97b-8a83abf32e0b" containerName="mariadb-account-create-update"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.778819 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="af358477-7483-4e80-b209-e7991328cbb1" containerName="mariadb-database-create"
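
The paired lines above are the kubelet's resource managers sweeping checkpointed state for pods that no longer exist on the node: cpu_manager.go:410 logs each removal (at error severity, although this is routine cleanup after pod deletion), state_mem.go:107 deletes the corresponding CPUSet assignment, and memory_manager.go:354 drops its copy of the state for the same containers. An illustrative sketch of that sweep, with hypothetical types rather than the kubelet's actual data structures:

package main

import "fmt"

// assignments maps podUID -> containerName -> cpuset string, mimicking a
// resource manager's checkpointed state.
type assignments map[string]map[string]string

// removeStaleState drops every assignment whose pod is no longer active,
// logging each removal the way the entries above do.
func removeStaleState(state assignments, activePods map[string]bool) {
	for podUID, containers := range state {
		if activePods[podUID] {
			continue
		}
		for name := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
			delete(containers, name)
		}
		delete(state, podUID)
	}
}

func main() {
	state := assignments{
		"e849d8f0": {"dnsmasq-dns": "0-3", "init": "0-3"},
		"054ef117": {"keystone-db-sync": "0-3"},
	}
	removeStaleState(state, map[string]bool{}) // no active pods: everything is swept
}
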
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.779749 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.824126 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"]
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.859840 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-ctfh6"]
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.861115 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.869814 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.869843 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.870151 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.869915 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.870428 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vn8ql"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.881855 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ctfh6"]
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.902757 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-sb\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.902825 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngqd4\" (UniqueName: \"kubernetes.io/projected/705f6293-03e9-424e-9627-d82cf8493610-kube-api-access-ngqd4\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.902851 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-dns-svc\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.902887 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-config\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:44 crc kubenswrapper[4765]: I1210 07:08:44.902954 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-nb\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.004676 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-combined-ca-bundle\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.004729 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-nb\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.004756 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58mjk\" (UniqueName: \"kubernetes.io/projected/485152ae-56fe-420a-8d8a-3127b6357103-kube-api-access-58mjk\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.004775 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-config-data\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.004945 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-sb\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.005009 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-scripts\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.005143 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngqd4\" (UniqueName: \"kubernetes.io/projected/705f6293-03e9-424e-9627-d82cf8493610-kube-api-access-ngqd4\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.005176 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-dns-svc\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.005207 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-credential-keys\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.005236 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-fernet-keys\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.005272 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-config\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.005904 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-sb\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.005953 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-nb\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.006331 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-config\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.006588 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-dns-svc\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.049528 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.052024 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.054589 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.055317 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.068075 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-bxqxj"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.069421 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.074388 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-74px2"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.074708 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.075347 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.076991 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-bxqxj"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.085737 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.091311 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngqd4\" (UniqueName: \"kubernetes.io/projected/705f6293-03e9-424e-9627-d82cf8493610-kube-api-access-ngqd4\") pod \"dnsmasq-dns-5b5fd8bb79-q8kl8\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.106719 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-credential-keys\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.106766 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-fernet-keys\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.106845 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-combined-ca-bundle\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.106868 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58mjk\" (UniqueName: \"kubernetes.io/projected/485152ae-56fe-420a-8d8a-3127b6357103-kube-api-access-58mjk\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.106889 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-config-data\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.106929 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-scripts\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.113553 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-scripts\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.119004 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-combined-ca-bundle\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.122546 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.211751 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-fernet-keys\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227066 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-config-data\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227738 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-config-data\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227820 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-log-httpd\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227848 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-db-sync-config-data\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227869 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227896 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227912 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-scripts\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227926 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wzt5\" (UniqueName: \"kubernetes.io/projected/3a408295-9dcc-4bde-8f4c-019bc7585479-kube-api-access-4wzt5\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227943 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/46580548-0bb0-4026-821b-2ee72fc56f70-etc-machine-id\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227965 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml6kc\" (UniqueName: \"kubernetes.io/projected/46580548-0bb0-4026-821b-2ee72fc56f70-kube-api-access-ml6kc\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.227981 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-scripts\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.228011 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-combined-ca-bundle\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.228054 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-config-data\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.228136 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-run-httpd\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.253582 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-credential-keys\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.254175 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58mjk\" (UniqueName: \"kubernetes.io/projected/485152ae-56fe-420a-8d8a-3127b6357103-kube-api-access-58mjk\") pod \"keystone-bootstrap-ctfh6\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.305816 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-tjvqv"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.307058 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.324557 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-r8bsz"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.325130 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.325326 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.336793 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-combined-ca-bundle\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.336857 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-config-data\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.336914 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-run-httpd\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.336956 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-config-data\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.336987 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-log-httpd\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.337008 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-db-sync-config-data\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.337028 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.337054 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.337097 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-scripts\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.337113 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wzt5\" (UniqueName: \"kubernetes.io/projected/3a408295-9dcc-4bde-8f4c-019bc7585479-kube-api-access-4wzt5\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.337131 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/46580548-0bb0-4026-821b-2ee72fc56f70-etc-machine-id\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.337153 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml6kc\" (UniqueName: \"kubernetes.io/projected/46580548-0bb0-4026-821b-2ee72fc56f70-kube-api-access-ml6kc\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.337168 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-scripts\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.345246 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-run-httpd\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.347394 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-tjvqv"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.348440 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-log-httpd\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.350676 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/46580548-0bb0-4026-821b-2ee72fc56f70-etc-machine-id\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.365973 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-combined-ca-bundle\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.367961 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.370156 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-config-data\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.371336 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.371963 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-scripts\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.372782 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-db-sync-config-data\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.419451 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-config-data\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.429454 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-scripts\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.441886 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wzt5\" (UniqueName: \"kubernetes.io/projected/3a408295-9dcc-4bde-8f4c-019bc7585479-kube-api-access-4wzt5\") pod \"ceilometer-0\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") " pod="openstack/ceilometer-0"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.442681 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzkvl\" (UniqueName: \"kubernetes.io/projected/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-kube-api-access-mzkvl\") pod \"neutron-db-sync-tjvqv\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") " pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.442847 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-combined-ca-bundle\") pod \"neutron-db-sync-tjvqv\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") " pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.442972 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-config\") pod \"neutron-db-sync-tjvqv\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") " pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.456505 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml6kc\" (UniqueName: \"kubernetes.io/projected/46580548-0bb0-4026-821b-2ee72fc56f70-kube-api-access-ml6kc\") pod \"cinder-db-sync-bxqxj\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") " pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.496253 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ctfh6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.531360 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.550195 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzkvl\" (UniqueName: \"kubernetes.io/projected/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-kube-api-access-mzkvl\") pod \"neutron-db-sync-tjvqv\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") " pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.550610 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-combined-ca-bundle\") pod \"neutron-db-sync-tjvqv\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") " pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.550702 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-config\") pod \"neutron-db-sync-tjvqv\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") " pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.585173 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-2tnf4"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.586763 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2tnf4"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.593341 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-config\") pod \"neutron-db-sync-tjvqv\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") " pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.601843 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-combined-ca-bundle\") pod \"neutron-db-sync-tjvqv\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") " pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.604308 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.604787 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-cc9wl"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.611823 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-wlh48"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.613073 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-wlh48"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.615978 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.622006 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-2tnf4"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.622942 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-zvbg6"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.624853 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzkvl\" (UniqueName: \"kubernetes.io/projected/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-kube-api-access-mzkvl\") pod \"neutron-db-sync-tjvqv\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") " pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.632265 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.638787 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-wlh48"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.655105 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74c95c887-pbmt2"]
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.665263 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-combined-ca-bundle\") pod \"barbican-db-sync-2tnf4\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") " pod="openstack/barbican-db-sync-2tnf4"
Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.670372 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-db-sync-config-data\") pod \"barbican-db-sync-2tnf4\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") " pod="openstack/barbican-db-sync-2tnf4"
\"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") " pod="openstack/barbican-db-sync-2tnf4" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.670526 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk92c\" (UniqueName: \"kubernetes.io/projected/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-kube-api-access-mk92c\") pod \"barbican-db-sync-2tnf4\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") " pod="openstack/barbican-db-sync-2tnf4" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.669816 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.680990 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74c95c887-pbmt2"] Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.681249 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.695854 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-bxqxj" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.777557 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-tjvqv" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778147 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-config\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778227 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-combined-ca-bundle\") pod \"barbican-db-sync-2tnf4\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") " pod="openstack/barbican-db-sync-2tnf4" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778261 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-sb\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778288 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-db-sync-config-data\") pod \"barbican-db-sync-2tnf4\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") " pod="openstack/barbican-db-sync-2tnf4" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778316 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-config-data\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778336 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntjc2\" (UniqueName: 
\"kubernetes.io/projected/182a289b-2c34-48a5-975d-94eccdf449fe-kube-api-access-ntjc2\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778355 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-combined-ca-bundle\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778389 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-dns-svc\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778405 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/182a289b-2c34-48a5-975d-94eccdf449fe-logs\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778447 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvzh6\" (UniqueName: \"kubernetes.io/projected/fc44a504-e791-44c7-afc8-77d11d77b7a4-kube-api-access-cvzh6\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778498 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-scripts\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778539 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-nb\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.778567 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk92c\" (UniqueName: \"kubernetes.io/projected/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-kube-api-access-mk92c\") pod \"barbican-db-sync-2tnf4\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") " pod="openstack/barbican-db-sync-2tnf4" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.792947 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-combined-ca-bundle\") pod \"barbican-db-sync-2tnf4\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") " pod="openstack/barbican-db-sync-2tnf4" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.804500 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-db-sync-config-data\") pod \"barbican-db-sync-2tnf4\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") " pod="openstack/barbican-db-sync-2tnf4" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.812972 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk92c\" (UniqueName: \"kubernetes.io/projected/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-kube-api-access-mk92c\") pod \"barbican-db-sync-2tnf4\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") " pod="openstack/barbican-db-sync-2tnf4" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.880011 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-scripts\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.880107 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-nb\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.880170 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-config\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.880231 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-sb\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.880278 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-config-data\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.880299 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntjc2\" (UniqueName: \"kubernetes.io/projected/182a289b-2c34-48a5-975d-94eccdf449fe-kube-api-access-ntjc2\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.880319 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-combined-ca-bundle\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.880354 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-dns-svc\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: 
\"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.880393 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/182a289b-2c34-48a5-975d-94eccdf449fe-logs\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.880418 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvzh6\" (UniqueName: \"kubernetes.io/projected/fc44a504-e791-44c7-afc8-77d11d77b7a4-kube-api-access-cvzh6\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.884772 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-dns-svc\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.885363 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-config-data\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.885418 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-config\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.885868 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-nb\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.887561 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-scripts\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.888035 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-sb\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.890644 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-combined-ca-bundle\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.892191 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/182a289b-2c34-48a5-975d-94eccdf449fe-logs\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.928599 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvzh6\" (UniqueName: \"kubernetes.io/projected/fc44a504-e791-44c7-afc8-77d11d77b7a4-kube-api-access-cvzh6\") pod \"dnsmasq-dns-74c95c887-pbmt2\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") " pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.936868 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntjc2\" (UniqueName: \"kubernetes.io/projected/182a289b-2c34-48a5-975d-94eccdf449fe-kube-api-access-ntjc2\") pod \"placement-db-sync-wlh48\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.977809 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2tnf4" Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.991292 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:08:45 crc kubenswrapper[4765]: I1210 07:08:45.993435 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:45.999278 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:45.999589 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:45.999909 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.000073 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-g8f64" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.017220 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.060888 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-wlh48" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.086570 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-scripts\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.086640 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.086692 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.086732 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.086767 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.086800 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-config-data\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.086822 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxbb5\" (UniqueName: \"kubernetes.io/projected/a7c39636-f699-42af-9568-2109bffb40d7-kube-api-access-wxbb5\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.086874 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-logs\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.103689 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.107285 4765 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.112525 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.119663 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.120676 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.137975 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.186377 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"] Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188337 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-logs\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188433 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188541 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-scripts\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188596 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188626 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188676 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-logs\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188708 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188747 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8d7w\" (UniqueName: \"kubernetes.io/projected/fb3cd78e-072c-493c-89a3-670da9deb4e2-kube-api-access-z8d7w\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188788 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188821 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188847 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188884 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188913 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-config-data\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188937 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxbb5\" (UniqueName: \"kubernetes.io/projected/a7c39636-f699-42af-9568-2109bffb40d7-kube-api-access-wxbb5\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.188969 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.189002 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.189944 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-logs\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.196571 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.196936 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.213551 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-scripts\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.214333 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.230229 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.235833 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ctfh6"] Dec 10 07:08:46 crc kubenswrapper[4765]: E1210 07:08:46.250558 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[etc-swift], unattached volumes=[], failed to process volumes=[]: context deadline exceeded" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.246364 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-config-data\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.286802 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxbb5\" (UniqueName: 
\"kubernetes.io/projected/a7c39636-f699-42af-9568-2109bffb40d7-kube-api-access-wxbb5\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.291600 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.291695 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.291781 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.291810 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-logs\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.291843 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8d7w\" (UniqueName: \"kubernetes.io/projected/fb3cd78e-072c-493c-89a3-670da9deb4e2-kube-api-access-z8d7w\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.291866 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.291888 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.291928 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.292153 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod 
\"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.292761 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-logs\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.295384 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.312279 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.313275 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.327170 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.340138 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8d7w\" (UniqueName: \"kubernetes.io/projected/fb3cd78e-072c-493c-89a3-670da9deb4e2-kube-api-access-z8d7w\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.340312 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.373238 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.373280 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 
crc kubenswrapper[4765]: I1210 07:08:46.417696 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.528164 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-tjvqv"] Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.563899 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ctfh6" event={"ID":"485152ae-56fe-420a-8d8a-3127b6357103","Type":"ContainerStarted","Data":"d826a14039a80d51384ef2ea59ff5e4d86832e1c8c7615697a419ac0e8dc1160"} Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.568542 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8" event={"ID":"705f6293-03e9-424e-9627-d82cf8493610","Type":"ContainerStarted","Data":"1fd7f7631e447836a7634c57197bf876faeab915ed3d84604023a9aa1358cbcd"} Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.581907 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-tjvqv" event={"ID":"ee0d3c38-da7c-46ad-ad72-5870e7b61db0","Type":"ContainerStarted","Data":"02e9818bc4428ec7b029632a5069a65af583c872747604531f1af4485faa7a01"} Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.606578 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-bxqxj"] Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.661260 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.740648 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.886751 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74c95c887-pbmt2"] Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.922706 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-wlh48"] Dec 10 07:08:46 crc kubenswrapper[4765]: I1210 07:08:46.979133 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-2tnf4"] Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.379180 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.598998 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ctfh6" event={"ID":"485152ae-56fe-420a-8d8a-3127b6357103","Type":"ContainerStarted","Data":"20f0b6e27a7b4ff0b68bfdb9cb61de6e57e03e305bce2b157b1fc9b1c6cf96b1"} Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.614533 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2tnf4" event={"ID":"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb","Type":"ContainerStarted","Data":"46e375e7e6d448485079afc2eaebe068ba737e8a04601aeb456cd6ab1e9cf8db"} Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.626881 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-wlh48" event={"ID":"182a289b-2c34-48a5-975d-94eccdf449fe","Type":"ContainerStarted","Data":"c0a1e078075f53840c53422576f40ae63b5b0210472318d3ade80df382077dca"} Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.627291 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-ctfh6" podStartSLOduration=3.627275195 
podStartE2EDuration="3.627275195s" podCreationTimestamp="2025-12-10 07:08:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:08:47.623473657 +0000 UTC m=+1247.350138983" watchObservedRunningTime="2025-12-10 07:08:47.627275195 +0000 UTC m=+1247.353940511" Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.634953 4765 generic.go:334] "Generic (PLEG): container finished" podID="fc44a504-e791-44c7-afc8-77d11d77b7a4" containerID="c78fba49464f6ea13dc04f9a4d0e76a90b7234c09df7beef6f3d8d29f5e5ba94" exitCode=0 Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.635040 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" event={"ID":"fc44a504-e791-44c7-afc8-77d11d77b7a4","Type":"ContainerDied","Data":"c78fba49464f6ea13dc04f9a4d0e76a90b7234c09df7beef6f3d8d29f5e5ba94"} Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.635073 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" event={"ID":"fc44a504-e791-44c7-afc8-77d11d77b7a4","Type":"ContainerStarted","Data":"b4ab5626ede4fd84840437858a78efe21a655c6f6dd6e125a2714aba6fddcc0c"} Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.639187 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb3cd78e-072c-493c-89a3-670da9deb4e2","Type":"ContainerStarted","Data":"a09abc2af2de765e9143aaf7cde81e6fca471fdedb30c17599eb8f68f87740d2"} Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.660311 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-tjvqv" event={"ID":"ee0d3c38-da7c-46ad-ad72-5870e7b61db0","Type":"ContainerStarted","Data":"33a35f8c3534b6fc13d601dde4481aaa8bb1b8d1e0037c77c68ad4bee8e3a61a"} Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.680640 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a408295-9dcc-4bde-8f4c-019bc7585479","Type":"ContainerStarted","Data":"8244c6398e56e3fdb56f085cc1c2b30653899e7475ff67fe0d072aab0ff34e6a"} Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.684665 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bxqxj" event={"ID":"46580548-0bb0-4026-821b-2ee72fc56f70","Type":"ContainerStarted","Data":"7018405f7fabdcffe58b847b6244611a4a7ff50430b347b085d4db5014961a15"} Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.709722 4765 generic.go:334] "Generic (PLEG): container finished" podID="705f6293-03e9-424e-9627-d82cf8493610" containerID="ba48440f0952cfcaef8b804ae11b2867b92772b7106de7915a3d2c442f38121a" exitCode=0 Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.709825 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8" event={"ID":"705f6293-03e9-424e-9627-d82cf8493610","Type":"ContainerDied","Data":"ba48440f0952cfcaef8b804ae11b2867b92772b7106de7915a3d2c442f38121a"} Dec 10 07:08:47 crc kubenswrapper[4765]: I1210 07:08:47.715724 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-tjvqv" podStartSLOduration=2.7157069849999997 podStartE2EDuration="2.715706985s" podCreationTimestamp="2025-12-10 07:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:08:47.71095101 +0000 UTC m=+1247.437616326" 
watchObservedRunningTime="2025-12-10 07:08:47.715706985 +0000 UTC m=+1247.442372301" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.191545 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.369449 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.398513 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-sb\") pod \"705f6293-03e9-424e-9627-d82cf8493610\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.398595 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-dns-svc\") pod \"705f6293-03e9-424e-9627-d82cf8493610\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.398628 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-nb\") pod \"705f6293-03e9-424e-9627-d82cf8493610\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.398682 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngqd4\" (UniqueName: \"kubernetes.io/projected/705f6293-03e9-424e-9627-d82cf8493610-kube-api-access-ngqd4\") pod \"705f6293-03e9-424e-9627-d82cf8493610\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.398840 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-config\") pod \"705f6293-03e9-424e-9627-d82cf8493610\" (UID: \"705f6293-03e9-424e-9627-d82cf8493610\") " Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.443309 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/705f6293-03e9-424e-9627-d82cf8493610-kube-api-access-ngqd4" (OuterVolumeSpecName: "kube-api-access-ngqd4") pod "705f6293-03e9-424e-9627-d82cf8493610" (UID: "705f6293-03e9-424e-9627-d82cf8493610"). InnerVolumeSpecName "kube-api-access-ngqd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.445557 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.496032 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "705f6293-03e9-424e-9627-d82cf8493610" (UID: "705f6293-03e9-424e-9627-d82cf8493610"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.543438 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngqd4\" (UniqueName: \"kubernetes.io/projected/705f6293-03e9-424e-9627-d82cf8493610-kube-api-access-ngqd4\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.543492 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.571582 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "705f6293-03e9-424e-9627-d82cf8493610" (UID: "705f6293-03e9-424e-9627-d82cf8493610"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.574947 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-config" (OuterVolumeSpecName: "config") pod "705f6293-03e9-424e-9627-d82cf8493610" (UID: "705f6293-03e9-424e-9627-d82cf8493610"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.592920 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "705f6293-03e9-424e-9627-d82cf8493610" (UID: "705f6293-03e9-424e-9627-d82cf8493610"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.650367 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.650401 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.650414 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/705f6293-03e9-424e-9627-d82cf8493610-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.691641 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.724687 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.958477 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a7c39636-f699-42af-9568-2109bffb40d7","Type":"ContainerStarted","Data":"63d3331ac5634fd16f5354c368e54e6cb0f9d94ee3ef27a5849938c55542daa8"} Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.977753 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" event={"ID":"fc44a504-e791-44c7-afc8-77d11d77b7a4","Type":"ContainerStarted","Data":"fcb34f95dac424c94080257451e042ae316feded1c01ee570341b28ae870a57d"} Dec 10 07:08:48 crc kubenswrapper[4765]: I1210 07:08:48.978056 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:49 crc kubenswrapper[4765]: I1210 07:08:49.015425 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8" Dec 10 07:08:49 crc kubenswrapper[4765]: I1210 07:08:49.016195 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b5fd8bb79-q8kl8" event={"ID":"705f6293-03e9-424e-9627-d82cf8493610","Type":"ContainerDied","Data":"1fd7f7631e447836a7634c57197bf876faeab915ed3d84604023a9aa1358cbcd"} Dec 10 07:08:49 crc kubenswrapper[4765]: I1210 07:08:49.016271 4765 scope.go:117] "RemoveContainer" containerID="ba48440f0952cfcaef8b804ae11b2867b92772b7106de7915a3d2c442f38121a" Dec 10 07:08:49 crc kubenswrapper[4765]: I1210 07:08:49.037341 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" podStartSLOduration=4.037314987 podStartE2EDuration="4.037314987s" podCreationTimestamp="2025-12-10 07:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:08:49.014951422 +0000 UTC m=+1248.741616758" watchObservedRunningTime="2025-12-10 07:08:49.037314987 +0000 UTC m=+1248.763980303" Dec 10 07:08:49 crc kubenswrapper[4765]: I1210 07:08:49.162895 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"] Dec 10 07:08:49 crc kubenswrapper[4765]: I1210 07:08:49.192575 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b5fd8bb79-q8kl8"] Dec 10 07:08:50 crc kubenswrapper[4765]: I1210 07:08:50.038831 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a7c39636-f699-42af-9568-2109bffb40d7","Type":"ContainerStarted","Data":"c635279a1f2665d647b895514bf3bb2413c60f81a4b49145d0270052ba4ce5ac"} Dec 10 07:08:50 crc kubenswrapper[4765]: I1210 07:08:50.068342 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb3cd78e-072c-493c-89a3-670da9deb4e2","Type":"ContainerStarted","Data":"74c70b6818eaac07cb6638833abccdc5acd2fce487900e85b40c8033a3673191"} Dec 10 07:08:50 crc kubenswrapper[4765]: I1210 07:08:50.654836 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="705f6293-03e9-424e-9627-d82cf8493610" path="/var/lib/kubelet/pods/705f6293-03e9-424e-9627-d82cf8493610/volumes" Dec 10 07:08:51 crc kubenswrapper[4765]: I1210 07:08:51.095589 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb3cd78e-072c-493c-89a3-670da9deb4e2","Type":"ContainerStarted","Data":"705c7b648240c1f3d9d43f56acbdd38076a00be7fcafe6e6bac1b7fea08bb9aa"} Dec 10 07:08:51 crc kubenswrapper[4765]: I1210 07:08:51.095861 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="fb3cd78e-072c-493c-89a3-670da9deb4e2" containerName="glance-log" containerID="cri-o://74c70b6818eaac07cb6638833abccdc5acd2fce487900e85b40c8033a3673191" gracePeriod=30 Dec 10 07:08:51 crc kubenswrapper[4765]: I1210 07:08:51.096178 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="fb3cd78e-072c-493c-89a3-670da9deb4e2" containerName="glance-httpd" containerID="cri-o://705c7b648240c1f3d9d43f56acbdd38076a00be7fcafe6e6bac1b7fea08bb9aa" gracePeriod=30 Dec 10 07:08:51 crc kubenswrapper[4765]: I1210 07:08:51.107876 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"a7c39636-f699-42af-9568-2109bffb40d7","Type":"ContainerStarted","Data":"cbdc667ae6243dbfd3c0bda0c5b97556a05592e7dbc896f75f92df34258a3bda"} Dec 10 07:08:51 crc kubenswrapper[4765]: I1210 07:08:51.108041 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a7c39636-f699-42af-9568-2109bffb40d7" containerName="glance-log" containerID="cri-o://c635279a1f2665d647b895514bf3bb2413c60f81a4b49145d0270052ba4ce5ac" gracePeriod=30 Dec 10 07:08:51 crc kubenswrapper[4765]: I1210 07:08:51.108174 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a7c39636-f699-42af-9568-2109bffb40d7" containerName="glance-httpd" containerID="cri-o://cbdc667ae6243dbfd3c0bda0c5b97556a05592e7dbc896f75f92df34258a3bda" gracePeriod=30 Dec 10 07:08:51 crc kubenswrapper[4765]: I1210 07:08:51.145916 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.145883624 podStartE2EDuration="6.145883624s" podCreationTimestamp="2025-12-10 07:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:08:51.133112602 +0000 UTC m=+1250.859777938" watchObservedRunningTime="2025-12-10 07:08:51.145883624 +0000 UTC m=+1250.872548950" Dec 10 07:08:51 crc kubenswrapper[4765]: I1210 07:08:51.248190 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:08:51 crc kubenswrapper[4765]: I1210 07:08:51.258386 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"swift-storage-0\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " pod="openstack/swift-storage-0" Dec 10 07:08:52 crc kubenswrapper[4765]: I1210 07:08:52.119537 4765 generic.go:334] "Generic (PLEG): container finished" podID="a7c39636-f699-42af-9568-2109bffb40d7" containerID="cbdc667ae6243dbfd3c0bda0c5b97556a05592e7dbc896f75f92df34258a3bda" exitCode=143 Dec 10 07:08:52 crc kubenswrapper[4765]: I1210 07:08:52.119983 4765 generic.go:334] "Generic (PLEG): container finished" podID="a7c39636-f699-42af-9568-2109bffb40d7" containerID="c635279a1f2665d647b895514bf3bb2413c60f81a4b49145d0270052ba4ce5ac" exitCode=143 Dec 10 07:08:52 crc kubenswrapper[4765]: I1210 07:08:52.119728 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a7c39636-f699-42af-9568-2109bffb40d7","Type":"ContainerDied","Data":"cbdc667ae6243dbfd3c0bda0c5b97556a05592e7dbc896f75f92df34258a3bda"} Dec 10 07:08:52 crc kubenswrapper[4765]: I1210 07:08:52.120070 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a7c39636-f699-42af-9568-2109bffb40d7","Type":"ContainerDied","Data":"c635279a1f2665d647b895514bf3bb2413c60f81a4b49145d0270052ba4ce5ac"} Dec 10 07:08:52 crc kubenswrapper[4765]: I1210 07:08:52.127541 4765 generic.go:334] "Generic (PLEG): container finished" podID="fb3cd78e-072c-493c-89a3-670da9deb4e2" containerID="705c7b648240c1f3d9d43f56acbdd38076a00be7fcafe6e6bac1b7fea08bb9aa" exitCode=0 Dec 10 07:08:52 
crc kubenswrapper[4765]: I1210 07:08:52.127571 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb3cd78e-072c-493c-89a3-670da9deb4e2","Type":"ContainerDied","Data":"705c7b648240c1f3d9d43f56acbdd38076a00be7fcafe6e6bac1b7fea08bb9aa"} Dec 10 07:08:52 crc kubenswrapper[4765]: I1210 07:08:52.127592 4765 generic.go:334] "Generic (PLEG): container finished" podID="fb3cd78e-072c-493c-89a3-670da9deb4e2" containerID="74c70b6818eaac07cb6638833abccdc5acd2fce487900e85b40c8033a3673191" exitCode=143 Dec 10 07:08:52 crc kubenswrapper[4765]: I1210 07:08:52.127684 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb3cd78e-072c-493c-89a3-670da9deb4e2","Type":"ContainerDied","Data":"74c70b6818eaac07cb6638833abccdc5acd2fce487900e85b40c8033a3673191"} Dec 10 07:08:53 crc kubenswrapper[4765]: I1210 07:08:53.143834 4765 generic.go:334] "Generic (PLEG): container finished" podID="485152ae-56fe-420a-8d8a-3127b6357103" containerID="20f0b6e27a7b4ff0b68bfdb9cb61de6e57e03e305bce2b157b1fc9b1c6cf96b1" exitCode=0 Dec 10 07:08:53 crc kubenswrapper[4765]: I1210 07:08:53.143884 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ctfh6" event={"ID":"485152ae-56fe-420a-8d8a-3127b6357103","Type":"ContainerDied","Data":"20f0b6e27a7b4ff0b68bfdb9cb61de6e57e03e305bce2b157b1fc9b1c6cf96b1"} Dec 10 07:08:53 crc kubenswrapper[4765]: I1210 07:08:53.173411 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.173381172 podStartE2EDuration="9.173381172s" podCreationTimestamp="2025-12-10 07:08:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:08:51.174793325 +0000 UTC m=+1250.901458641" watchObservedRunningTime="2025-12-10 07:08:53.173381172 +0000 UTC m=+1252.900046488" Dec 10 07:08:54 crc kubenswrapper[4765]: I1210 07:08:54.962368 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:08:54 crc kubenswrapper[4765]: I1210 07:08:54.971187 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-ctfh6" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.042810 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-scripts\") pod \"fb3cd78e-072c-493c-89a3-670da9deb4e2\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.042908 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-internal-tls-certs\") pod \"fb3cd78e-072c-493c-89a3-670da9deb4e2\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.042969 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"fb3cd78e-072c-493c-89a3-670da9deb4e2\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056321 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-logs\") pod \"fb3cd78e-072c-493c-89a3-670da9deb4e2\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056506 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-credential-keys\") pod \"485152ae-56fe-420a-8d8a-3127b6357103\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056541 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-combined-ca-bundle\") pod \"485152ae-56fe-420a-8d8a-3127b6357103\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056584 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-scripts\") pod \"485152ae-56fe-420a-8d8a-3127b6357103\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056611 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8d7w\" (UniqueName: \"kubernetes.io/projected/fb3cd78e-072c-493c-89a3-670da9deb4e2-kube-api-access-z8d7w\") pod \"fb3cd78e-072c-493c-89a3-670da9deb4e2\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056705 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58mjk\" (UniqueName: \"kubernetes.io/projected/485152ae-56fe-420a-8d8a-3127b6357103-kube-api-access-58mjk\") pod \"485152ae-56fe-420a-8d8a-3127b6357103\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056738 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-httpd-run\") pod \"fb3cd78e-072c-493c-89a3-670da9deb4e2\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " Dec 10 
07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056765 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-combined-ca-bundle\") pod \"fb3cd78e-072c-493c-89a3-670da9deb4e2\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056796 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-fernet-keys\") pod \"485152ae-56fe-420a-8d8a-3127b6357103\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056847 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-config-data\") pod \"fb3cd78e-072c-493c-89a3-670da9deb4e2\" (UID: \"fb3cd78e-072c-493c-89a3-670da9deb4e2\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.056945 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-config-data\") pod \"485152ae-56fe-420a-8d8a-3127b6357103\" (UID: \"485152ae-56fe-420a-8d8a-3127b6357103\") " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.068866 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-logs" (OuterVolumeSpecName: "logs") pod "fb3cd78e-072c-493c-89a3-670da9deb4e2" (UID: "fb3cd78e-072c-493c-89a3-670da9deb4e2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.087237 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "fb3cd78e-072c-493c-89a3-670da9deb4e2" (UID: "fb3cd78e-072c-493c-89a3-670da9deb4e2"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.087318 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/485152ae-56fe-420a-8d8a-3127b6357103-kube-api-access-58mjk" (OuterVolumeSpecName: "kube-api-access-58mjk") pod "485152ae-56fe-420a-8d8a-3127b6357103" (UID: "485152ae-56fe-420a-8d8a-3127b6357103"). InnerVolumeSpecName "kube-api-access-58mjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.091288 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "485152ae-56fe-420a-8d8a-3127b6357103" (UID: "485152ae-56fe-420a-8d8a-3127b6357103"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.091426 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "fb3cd78e-072c-493c-89a3-670da9deb4e2" (UID: "fb3cd78e-072c-493c-89a3-670da9deb4e2"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.103406 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb3cd78e-072c-493c-89a3-670da9deb4e2-kube-api-access-z8d7w" (OuterVolumeSpecName: "kube-api-access-z8d7w") pod "fb3cd78e-072c-493c-89a3-670da9deb4e2" (UID: "fb3cd78e-072c-493c-89a3-670da9deb4e2"). InnerVolumeSpecName "kube-api-access-z8d7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.110772 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-scripts" (OuterVolumeSpecName: "scripts") pod "fb3cd78e-072c-493c-89a3-670da9deb4e2" (UID: "fb3cd78e-072c-493c-89a3-670da9deb4e2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.120708 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-scripts" (OuterVolumeSpecName: "scripts") pod "485152ae-56fe-420a-8d8a-3127b6357103" (UID: "485152ae-56fe-420a-8d8a-3127b6357103"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.131358 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "485152ae-56fe-420a-8d8a-3127b6357103" (UID: "485152ae-56fe-420a-8d8a-3127b6357103"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.160776 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-config-data" (OuterVolumeSpecName: "config-data") pod "485152ae-56fe-420a-8d8a-3127b6357103" (UID: "485152ae-56fe-420a-8d8a-3127b6357103"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.178025 4765 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.185218 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.185258 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8d7w\" (UniqueName: \"kubernetes.io/projected/fb3cd78e-072c-493c-89a3-670da9deb4e2-kube-api-access-z8d7w\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.185277 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58mjk\" (UniqueName: \"kubernetes.io/projected/485152ae-56fe-420a-8d8a-3127b6357103-kube-api-access-58mjk\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.185291 4765 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.185305 4765 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.185316 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.185327 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.185373 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.185385 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb3cd78e-072c-493c-89a3-670da9deb4e2-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.224247 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "485152ae-56fe-420a-8d8a-3127b6357103" (UID: "485152ae-56fe-420a-8d8a-3127b6357103"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.226357 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fb3cd78e-072c-493c-89a3-670da9deb4e2","Type":"ContainerDied","Data":"a09abc2af2de765e9143aaf7cde81e6fca471fdedb30c17599eb8f68f87740d2"} Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.226404 4765 scope.go:117] "RemoveContainer" containerID="705c7b648240c1f3d9d43f56acbdd38076a00be7fcafe6e6bac1b7fea08bb9aa" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.226532 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.247181 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb3cd78e-072c-493c-89a3-670da9deb4e2" (UID: "fb3cd78e-072c-493c-89a3-670da9deb4e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.264076 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ctfh6" event={"ID":"485152ae-56fe-420a-8d8a-3127b6357103","Type":"ContainerDied","Data":"d826a14039a80d51384ef2ea59ff5e4d86832e1c8c7615697a419ac0e8dc1160"} Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.264292 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d826a14039a80d51384ef2ea59ff5e4d86832e1c8c7615697a419ac0e8dc1160" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.264396 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ctfh6" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.284152 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.288964 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.288995 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485152ae-56fe-420a-8d8a-3127b6357103-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.289007 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.291913 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "fb3cd78e-072c-493c-89a3-670da9deb4e2" (UID: "fb3cd78e-072c-493c-89a3-670da9deb4e2"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.299046 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-config-data" (OuterVolumeSpecName: "config-data") pod "fb3cd78e-072c-493c-89a3-670da9deb4e2" (UID: "fb3cd78e-072c-493c-89a3-670da9deb4e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.354212 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-ctfh6"] Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.367696 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-ctfh6"] Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.391636 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.391687 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb3cd78e-072c-493c-89a3-670da9deb4e2-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.431551 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dsf6c"] Dec 10 07:08:55 crc kubenswrapper[4765]: E1210 07:08:55.432703 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3cd78e-072c-493c-89a3-670da9deb4e2" containerName="glance-httpd" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.432731 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3cd78e-072c-493c-89a3-670da9deb4e2" containerName="glance-httpd" Dec 10 07:08:55 crc kubenswrapper[4765]: E1210 07:08:55.432762 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="705f6293-03e9-424e-9627-d82cf8493610" containerName="init" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.432770 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="705f6293-03e9-424e-9627-d82cf8493610" containerName="init" Dec 10 07:08:55 crc kubenswrapper[4765]: E1210 07:08:55.432799 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485152ae-56fe-420a-8d8a-3127b6357103" containerName="keystone-bootstrap" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.432806 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="485152ae-56fe-420a-8d8a-3127b6357103" containerName="keystone-bootstrap" Dec 10 07:08:55 crc kubenswrapper[4765]: E1210 07:08:55.432852 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3cd78e-072c-493c-89a3-670da9deb4e2" containerName="glance-log" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.432861 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3cd78e-072c-493c-89a3-670da9deb4e2" containerName="glance-log" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.433890 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="705f6293-03e9-424e-9627-d82cf8493610" containerName="init" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.433928 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb3cd78e-072c-493c-89a3-670da9deb4e2" containerName="glance-httpd" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.433947 4765 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="fb3cd78e-072c-493c-89a3-670da9deb4e2" containerName="glance-log" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.433962 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="485152ae-56fe-420a-8d8a-3127b6357103" containerName="keystone-bootstrap" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.435430 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.440559 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.441028 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.441146 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vn8ql" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.442477 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.442680 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.471680 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dsf6c"] Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.572928 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.584007 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.596708 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-scripts\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.596783 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-credential-keys\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.596806 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-fernet-keys\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.596863 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-config-data\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.596914 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8flz8\" (UniqueName: 
\"kubernetes.io/projected/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-kube-api-access-8flz8\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.596942 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-combined-ca-bundle\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.605037 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.608001 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.611227 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.612071 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.624863 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.699744 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-scripts\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.699824 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-credential-keys\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.699859 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp9vj\" (UniqueName: \"kubernetes.io/projected/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-kube-api-access-hp9vj\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.699889 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-fernet-keys\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.699920 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.699964 4765 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.700007 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-config-data\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.700036 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.700074 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8flz8\" (UniqueName: \"kubernetes.io/projected/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-kube-api-access-8flz8\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.700121 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-combined-ca-bundle\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.700143 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-logs\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.700188 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.700210 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.700229 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.707998 4765 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-fernet-keys\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.708243 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-config-data\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.708500 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-scripts\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.709308 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-credential-keys\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.709416 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-combined-ca-bundle\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.724261 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8flz8\" (UniqueName: \"kubernetes.io/projected/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-kube-api-access-8flz8\") pod \"keystone-bootstrap-dsf6c\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.768374 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.801702 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.802112 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.802198 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp9vj\" (UniqueName: \"kubernetes.io/projected/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-kube-api-access-hp9vj\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.802240 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.802276 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.802335 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.802402 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-logs\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.802485 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.803420 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Dec 10 
07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.806945 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-logs\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.807250 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.808366 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.809042 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.810195 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.823988 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.834920 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp9vj\" (UniqueName: \"kubernetes.io/projected/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-kube-api-access-hp9vj\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.856656 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:08:55 crc kubenswrapper[4765]: I1210 07:08:55.947330 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:08:56 crc kubenswrapper[4765]: I1210 07:08:56.122978 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" Dec 10 07:08:56 crc kubenswrapper[4765]: I1210 07:08:56.193651 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79778dbd8c-9dw6f"] Dec 10 07:08:56 crc kubenswrapper[4765]: I1210 07:08:56.193923 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" podUID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerName="dnsmasq-dns" containerID="cri-o://7142c04cc4e57c825e867cdb342a3a40fc9b114254c516d77632e8e8eeccce3b" gracePeriod=10 Dec 10 07:08:56 crc kubenswrapper[4765]: I1210 07:08:56.601258 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="485152ae-56fe-420a-8d8a-3127b6357103" path="/var/lib/kubelet/pods/485152ae-56fe-420a-8d8a-3127b6357103/volumes" Dec 10 07:08:56 crc kubenswrapper[4765]: I1210 07:08:56.601999 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb3cd78e-072c-493c-89a3-670da9deb4e2" path="/var/lib/kubelet/pods/fb3cd78e-072c-493c-89a3-670da9deb4e2/volumes" Dec 10 07:08:57 crc kubenswrapper[4765]: I1210 07:08:57.289828 4765 generic.go:334] "Generic (PLEG): container finished" podID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerID="7142c04cc4e57c825e867cdb342a3a40fc9b114254c516d77632e8e8eeccce3b" exitCode=0 Dec 10 07:08:57 crc kubenswrapper[4765]: I1210 07:08:57.289870 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" event={"ID":"c997c61a-e224-4bdf-b7ce-e1683985a3b2","Type":"ContainerDied","Data":"7142c04cc4e57c825e867cdb342a3a40fc9b114254c516d77632e8e8eeccce3b"} Dec 10 07:08:58 crc kubenswrapper[4765]: I1210 07:08:58.864427 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" podUID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: connect: connection refused" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.260493 4765 scope.go:117] "RemoveContainer" containerID="74c70b6818eaac07cb6638833abccdc5acd2fce487900e85b40c8033a3673191" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.343218 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a7c39636-f699-42af-9568-2109bffb40d7","Type":"ContainerDied","Data":"63d3331ac5634fd16f5354c368e54e6cb0f9d94ee3ef27a5849938c55542daa8"} Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.343750 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63d3331ac5634fd16f5354c368e54e6cb0f9d94ee3ef27a5849938c55542daa8" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.363863 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.458792 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-httpd-run\") pod \"a7c39636-f699-42af-9568-2109bffb40d7\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.458892 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-scripts\") pod \"a7c39636-f699-42af-9568-2109bffb40d7\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.458959 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-combined-ca-bundle\") pod \"a7c39636-f699-42af-9568-2109bffb40d7\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.458989 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-public-tls-certs\") pod \"a7c39636-f699-42af-9568-2109bffb40d7\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.459023 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxbb5\" (UniqueName: \"kubernetes.io/projected/a7c39636-f699-42af-9568-2109bffb40d7-kube-api-access-wxbb5\") pod \"a7c39636-f699-42af-9568-2109bffb40d7\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.459106 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-config-data\") pod \"a7c39636-f699-42af-9568-2109bffb40d7\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.459128 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"a7c39636-f699-42af-9568-2109bffb40d7\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.459177 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-logs\") pod \"a7c39636-f699-42af-9568-2109bffb40d7\" (UID: \"a7c39636-f699-42af-9568-2109bffb40d7\") " Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.459913 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-logs" (OuterVolumeSpecName: "logs") pod "a7c39636-f699-42af-9568-2109bffb40d7" (UID: "a7c39636-f699-42af-9568-2109bffb40d7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.460520 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a7c39636-f699-42af-9568-2109bffb40d7" (UID: "a7c39636-f699-42af-9568-2109bffb40d7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.470592 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-scripts" (OuterVolumeSpecName: "scripts") pod "a7c39636-f699-42af-9568-2109bffb40d7" (UID: "a7c39636-f699-42af-9568-2109bffb40d7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.472227 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "a7c39636-f699-42af-9568-2109bffb40d7" (UID: "a7c39636-f699-42af-9568-2109bffb40d7"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.489321 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7c39636-f699-42af-9568-2109bffb40d7-kube-api-access-wxbb5" (OuterVolumeSpecName: "kube-api-access-wxbb5") pod "a7c39636-f699-42af-9568-2109bffb40d7" (UID: "a7c39636-f699-42af-9568-2109bffb40d7"). InnerVolumeSpecName "kube-api-access-wxbb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.561607 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxbb5\" (UniqueName: \"kubernetes.io/projected/a7c39636-f699-42af-9568-2109bffb40d7-kube-api-access-wxbb5\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.562049 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.562062 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.562074 4765 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a7c39636-f699-42af-9568-2109bffb40d7-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.562098 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.580494 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.664529 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 10 
07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.733533 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7c39636-f699-42af-9568-2109bffb40d7" (UID: "a7c39636-f699-42af-9568-2109bffb40d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.759748 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-config-data" (OuterVolumeSpecName: "config-data") pod "a7c39636-f699-42af-9568-2109bffb40d7" (UID: "a7c39636-f699-42af-9568-2109bffb40d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.760103 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a7c39636-f699-42af-9568-2109bffb40d7" (UID: "a7c39636-f699-42af-9568-2109bffb40d7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.777962 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.777992 4765 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.778002 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c39636-f699-42af-9568-2109bffb40d7-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:00 crc kubenswrapper[4765]: I1210 07:09:00.794477 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dsf6c"] Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.365468 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.406156 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.417019 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.426760 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:09:01 crc kubenswrapper[4765]: E1210 07:09:01.427176 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7c39636-f699-42af-9568-2109bffb40d7" containerName="glance-httpd" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.427195 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7c39636-f699-42af-9568-2109bffb40d7" containerName="glance-httpd" Dec 10 07:09:01 crc kubenswrapper[4765]: E1210 07:09:01.427221 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7c39636-f699-42af-9568-2109bffb40d7" containerName="glance-log" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.427228 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7c39636-f699-42af-9568-2109bffb40d7" containerName="glance-log" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.427396 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7c39636-f699-42af-9568-2109bffb40d7" containerName="glance-log" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.427419 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7c39636-f699-42af-9568-2109bffb40d7" containerName="glance-httpd" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.428395 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.431594 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.433174 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.448741 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.589052 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.590051 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.595038 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-logs\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.595098 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.595208 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.595236 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p75rd\" (UniqueName: \"kubernetes.io/projected/7cded555-d21d-4dac-9806-a6e1071683c1-kube-api-access-p75rd\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.595282 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.595327 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.595410 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-config-data\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.595481 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-scripts\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.697120 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-logs\") pod \"glance-default-external-api-0\" 
(UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.697166 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.697217 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.697240 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p75rd\" (UniqueName: \"kubernetes.io/projected/7cded555-d21d-4dac-9806-a6e1071683c1-kube-api-access-p75rd\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.697271 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.697303 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.697363 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-config-data\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.697407 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-scripts\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.698664 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.698741 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") device mount path \"/mnt/openstack/pv12\"" 
pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.698915 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-logs\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.703042 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-config-data\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.706483 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.711293 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.712401 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-scripts\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.722484 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p75rd\" (UniqueName: \"kubernetes.io/projected/7cded555-d21d-4dac-9806-a6e1071683c1-kube-api-access-p75rd\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.739680 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " pod="openstack/glance-default-external-api-0" Dec 10 07:09:01 crc kubenswrapper[4765]: I1210 07:09:01.749579 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:09:02 crc kubenswrapper[4765]: I1210 07:09:02.602886 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7c39636-f699-42af-9568-2109bffb40d7" path="/var/lib/kubelet/pods/a7c39636-f699-42af-9568-2109bffb40d7/volumes" Dec 10 07:09:08 crc kubenswrapper[4765]: I1210 07:09:08.863955 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" podUID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: i/o timeout" Dec 10 07:09:09 crc kubenswrapper[4765]: E1210 07:09:09.384192 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16" Dec 10 07:09:09 crc kubenswrapper[4765]: E1210 07:09:09.384353 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mk92c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-2tnf4_openstack(c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 07:09:09 crc kubenswrapper[4765]: E1210 07:09:09.385514 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-2tnf4" podUID="c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.458621 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dsf6c" 
event={"ID":"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2","Type":"ContainerStarted","Data":"7f7f067b82553823093852f28ab985c590968275a535b58c6f915b31ecdbe22d"} Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.461233 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" event={"ID":"c997c61a-e224-4bdf-b7ce-e1683985a3b2","Type":"ContainerDied","Data":"86b8a27c3026be68c6ef4b9da6e986b9946e12405d20b4931ababe7732fc9dc1"} Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.461312 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86b8a27c3026be68c6ef4b9da6e986b9946e12405d20b4931ababe7732fc9dc1" Dec 10 07:09:09 crc kubenswrapper[4765]: E1210 07:09:09.465736 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16\\\"\"" pod="openstack/barbican-db-sync-2tnf4" podUID="c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.479342 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.538307 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl585\" (UniqueName: \"kubernetes.io/projected/c997c61a-e224-4bdf-b7ce-e1683985a3b2-kube-api-access-dl585\") pod \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.538415 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-nb\") pod \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.538454 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-sb\") pod \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.538678 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-dns-svc\") pod \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.538761 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-config\") pod \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\" (UID: \"c997c61a-e224-4bdf-b7ce-e1683985a3b2\") " Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.548378 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c997c61a-e224-4bdf-b7ce-e1683985a3b2-kube-api-access-dl585" (OuterVolumeSpecName: "kube-api-access-dl585") pod "c997c61a-e224-4bdf-b7ce-e1683985a3b2" (UID: "c997c61a-e224-4bdf-b7ce-e1683985a3b2"). InnerVolumeSpecName "kube-api-access-dl585". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.587680 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c997c61a-e224-4bdf-b7ce-e1683985a3b2" (UID: "c997c61a-e224-4bdf-b7ce-e1683985a3b2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.590901 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c997c61a-e224-4bdf-b7ce-e1683985a3b2" (UID: "c997c61a-e224-4bdf-b7ce-e1683985a3b2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.593230 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c997c61a-e224-4bdf-b7ce-e1683985a3b2" (UID: "c997c61a-e224-4bdf-b7ce-e1683985a3b2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.608509 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-config" (OuterVolumeSpecName: "config") pod "c997c61a-e224-4bdf-b7ce-e1683985a3b2" (UID: "c997c61a-e224-4bdf-b7ce-e1683985a3b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.643004 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl585\" (UniqueName: \"kubernetes.io/projected/c997c61a-e224-4bdf-b7ce-e1683985a3b2-kube-api-access-dl585\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.643201 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.643412 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.643964 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:09 crc kubenswrapper[4765]: I1210 07:09:09.644039 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c997c61a-e224-4bdf-b7ce-e1683985a3b2-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:10 crc kubenswrapper[4765]: I1210 07:09:10.471359 4765 generic.go:334] "Generic (PLEG): container finished" podID="ee0d3c38-da7c-46ad-ad72-5870e7b61db0" containerID="33a35f8c3534b6fc13d601dde4481aaa8bb1b8d1e0037c77c68ad4bee8e3a61a" exitCode=0 Dec 10 07:09:10 crc kubenswrapper[4765]: I1210 07:09:10.471460 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" Dec 10 07:09:10 crc kubenswrapper[4765]: I1210 07:09:10.471451 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-tjvqv" event={"ID":"ee0d3c38-da7c-46ad-ad72-5870e7b61db0","Type":"ContainerDied","Data":"33a35f8c3534b6fc13d601dde4481aaa8bb1b8d1e0037c77c68ad4bee8e3a61a"} Dec 10 07:09:10 crc kubenswrapper[4765]: I1210 07:09:10.528737 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79778dbd8c-9dw6f"] Dec 10 07:09:10 crc kubenswrapper[4765]: I1210 07:09:10.540812 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79778dbd8c-9dw6f"] Dec 10 07:09:10 crc kubenswrapper[4765]: I1210 07:09:10.599066 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" path="/var/lib/kubelet/pods/c997c61a-e224-4bdf-b7ce-e1683985a3b2/volumes" Dec 10 07:09:10 crc kubenswrapper[4765]: E1210 07:09:10.853327 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49" Dec 10 07:09:10 crc kubenswrapper[4765]: E1210 07:09:10.853793 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ml6kc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:n
Dec 10 07:09:10 crc kubenswrapper[4765]: E1210 07:09:10.855062 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-bxqxj" podUID="46580548-0bb0-4026-821b-2ee72fc56f70"
Dec 10 07:09:10 crc kubenswrapper[4765]: I1210 07:09:10.858182 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 10 07:09:11 crc kubenswrapper[4765]: I1210 07:09:11.487793 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 10 07:09:11 crc kubenswrapper[4765]: I1210 07:09:11.499116 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a408295-9dcc-4bde-8f4c-019bc7585479","Type":"ContainerStarted","Data":"9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4"}
Dec 10 07:09:11 crc kubenswrapper[4765]: I1210 07:09:11.515351 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e13fb850-ffa8-4878-a9c0-4b3e573e8f05","Type":"ContainerStarted","Data":"a0efed9797258a13105a3de4e32240d1adefe0bf511129d21528a8994575b0e0"}
Dec 10 07:09:11 crc kubenswrapper[4765]: I1210 07:09:11.531542 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dsf6c" event={"ID":"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2","Type":"ContainerStarted","Data":"ac3689098ac38d2b4d8268641e0b2632892fba929773559138ff9e1b54c7ea38"}
Dec 10 07:09:11 crc kubenswrapper[4765]: I1210 07:09:11.556663 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dsf6c" podStartSLOduration=16.55664371 podStartE2EDuration="16.55664371s" podCreationTimestamp="2025-12-10 07:08:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:11.551074282 +0000 UTC m=+1271.277739608" watchObservedRunningTime="2025-12-10 07:09:11.55664371 +0000 UTC m=+1271.283309036"
Dec 10 07:09:11 crc kubenswrapper[4765]: I1210 07:09:11.558841 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-wlh48" event={"ID":"182a289b-2c34-48a5-975d-94eccdf449fe","Type":"ContainerStarted","Data":"6afc810f2a9aceb67b106d7016d20552f7d41d67ab27474e4b7250b981417bb4"}
Dec 10 07:09:11 crc kubenswrapper[4765]: E1210 07:09:11.565991 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49\\\"\"" pod="openstack/cinder-db-sync-bxqxj" podUID="46580548-0bb0-4026-821b-2ee72fc56f70"
Dec 10 07:09:11 crc kubenswrapper[4765]: I1210 07:09:11.595901 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-wlh48" podStartSLOduration=2.729431903 podStartE2EDuration="26.595870943s" podCreationTimestamp="2025-12-10 07:08:45 +0000 UTC" firstStartedPulling="2025-12-10 07:08:46.987459514 +0000 UTC m=+1246.714124830" lastFinishedPulling="2025-12-10 07:09:10.853898554 +0000 UTC m=+1270.580563870" observedRunningTime="2025-12-10 07:09:11.579765586 +0000 UTC m=+1271.306430902" watchObservedRunningTime="2025-12-10 07:09:11.595870943 +0000 UTC m=+1271.322536259"
Dec 10 07:09:11 crc kubenswrapper[4765]: W1210 07:09:11.647256 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7cded555_d21d_4dac_9806_a6e1071683c1.slice/crio-55aa5f9ad7167502e70c7524a9ca0986bc53678ca59f2f3da255499cc7273986 WatchSource:0}: Error finding container 55aa5f9ad7167502e70c7524a9ca0986bc53678ca59f2f3da255499cc7273986: Status 404 returned error can't find the container with id 55aa5f9ad7167502e70c7524a9ca0986bc53678ca59f2f3da255499cc7273986
Dec 10 07:09:11 crc kubenswrapper[4765]: I1210 07:09:11.654611 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 10 07:09:11 crc kubenswrapper[4765]: I1210 07:09:11.982520 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.104595 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzkvl\" (UniqueName: \"kubernetes.io/projected/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-kube-api-access-mzkvl\") pod \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") "
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.104730 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-combined-ca-bundle\") pod \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") "
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.104920 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-config\") pod \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\" (UID: \"ee0d3c38-da7c-46ad-ad72-5870e7b61db0\") "
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.126307 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-kube-api-access-mzkvl" (OuterVolumeSpecName: "kube-api-access-mzkvl") pod "ee0d3c38-da7c-46ad-ad72-5870e7b61db0" (UID: "ee0d3c38-da7c-46ad-ad72-5870e7b61db0"). InnerVolumeSpecName "kube-api-access-mzkvl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.140528 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee0d3c38-da7c-46ad-ad72-5870e7b61db0" (UID: "ee0d3c38-da7c-46ad-ad72-5870e7b61db0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.154628 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-config" (OuterVolumeSpecName: "config") pod "ee0d3c38-da7c-46ad-ad72-5870e7b61db0" (UID: "ee0d3c38-da7c-46ad-ad72-5870e7b61db0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.208616 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-config\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.209558 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzkvl\" (UniqueName: \"kubernetes.io/projected/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-kube-api-access-mzkvl\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.209583 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee0d3c38-da7c-46ad-ad72-5870e7b61db0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.372119 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.575338 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e13fb850-ffa8-4878-a9c0-4b3e573e8f05","Type":"ContainerStarted","Data":"c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391"}
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.580450 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-tjvqv" event={"ID":"ee0d3c38-da7c-46ad-ad72-5870e7b61db0","Type":"ContainerDied","Data":"02e9818bc4428ec7b029632a5069a65af583c872747604531f1af4485faa7a01"}
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.580496 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02e9818bc4428ec7b029632a5069a65af583c872747604531f1af4485faa7a01"
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.580568 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-tjvqv"
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.583478 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7cded555-d21d-4dac-9806-a6e1071683c1","Type":"ContainerStarted","Data":"7f219892c61ed822df7b7ed0213204ecc23cae587dcf3561a460b12cc40a5a70"}
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.583528 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7cded555-d21d-4dac-9806-a6e1071683c1","Type":"ContainerStarted","Data":"55aa5f9ad7167502e70c7524a9ca0986bc53678ca59f2f3da255499cc7273986"}
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.682167 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f9987d8d9-gwj4t"]
Dec 10 07:09:12 crc kubenswrapper[4765]: E1210 07:09:12.682680 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerName="init"
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.682703 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerName="init"
Dec 10 07:09:12 crc kubenswrapper[4765]: E1210 07:09:12.682727 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerName="dnsmasq-dns"
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.682735 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerName="dnsmasq-dns"
Dec 10 07:09:12 crc kubenswrapper[4765]: E1210 07:09:12.682758 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee0d3c38-da7c-46ad-ad72-5870e7b61db0" containerName="neutron-db-sync"
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.682767 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee0d3c38-da7c-46ad-ad72-5870e7b61db0" containerName="neutron-db-sync"
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.683011 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee0d3c38-da7c-46ad-ad72-5870e7b61db0" containerName="neutron-db-sync"
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.683037 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerName="dnsmasq-dns"
Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.684133 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t"
Need to start a new one" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.716315 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f9987d8d9-gwj4t"] Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.719298 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-nb\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.719371 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-config\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.719398 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzngq\" (UniqueName: \"kubernetes.io/projected/9909c4a1-0c26-4db9-8f34-3e14ad438864-kube-api-access-kzngq\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.719483 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-sb\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.719550 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-dns-svc\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.822268 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-dns-svc\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.822354 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-nb\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.822400 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-config\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.822482 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzngq\" 
(UniqueName: \"kubernetes.io/projected/9909c4a1-0c26-4db9-8f34-3e14ad438864-kube-api-access-kzngq\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.822569 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-sb\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.823521 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-sb\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.823698 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-nb\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.824199 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-config\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.824435 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-dns-svc\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.828924 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-794648c68d-ljskq"] Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.831038 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.838687 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-794648c68d-ljskq"] Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.851269 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.853813 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.854111 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.854284 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-r8bsz" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.908778 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzngq\" (UniqueName: \"kubernetes.io/projected/9909c4a1-0c26-4db9-8f34-3e14ad438864-kube-api-access-kzngq\") pod \"dnsmasq-dns-f9987d8d9-gwj4t\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.925153 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-combined-ca-bundle\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.925212 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-ovndb-tls-certs\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.925265 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-httpd-config\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.925339 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-config\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:12 crc kubenswrapper[4765]: I1210 07:09:12.925366 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsmzz\" (UniqueName: \"kubernetes.io/projected/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-kube-api-access-xsmzz\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.017518 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.027602 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-config\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.027677 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsmzz\" (UniqueName: \"kubernetes.io/projected/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-kube-api-access-xsmzz\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.027711 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-combined-ca-bundle\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.027743 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-ovndb-tls-certs\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.027862 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-httpd-config\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.038508 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-combined-ca-bundle\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.038540 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-ovndb-tls-certs\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.040050 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-config\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.041315 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-httpd-config\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.048817 4765 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-xsmzz\" (UniqueName: \"kubernetes.io/projected/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-kube-api-access-xsmzz\") pod \"neutron-794648c68d-ljskq\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.155691 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.611872 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"47a6992e3f0b8ebb8c2ac3bece8d28b964b32584f5893c5295ffba6b0965a1f0"} Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.614177 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a408295-9dcc-4bde-8f4c-019bc7585479","Type":"ContainerStarted","Data":"3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91"} Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.870202 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-79778dbd8c-9dw6f" podUID="c997c61a-e224-4bdf-b7ce-e1683985a3b2" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: i/o timeout" Dec 10 07:09:13 crc kubenswrapper[4765]: I1210 07:09:13.894426 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f9987d8d9-gwj4t"] Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.209028 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-794648c68d-ljskq"] Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.627288 4765 generic.go:334] "Generic (PLEG): container finished" podID="182a289b-2c34-48a5-975d-94eccdf449fe" containerID="6afc810f2a9aceb67b106d7016d20552f7d41d67ab27474e4b7250b981417bb4" exitCode=0 Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.627656 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-wlh48" event={"ID":"182a289b-2c34-48a5-975d-94eccdf449fe","Type":"ContainerDied","Data":"6afc810f2a9aceb67b106d7016d20552f7d41d67ab27474e4b7250b981417bb4"} Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.650853 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e13fb850-ffa8-4878-a9c0-4b3e573e8f05","Type":"ContainerStarted","Data":"e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c"} Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.670386 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7cded555-d21d-4dac-9806-a6e1071683c1","Type":"ContainerStarted","Data":"76e8fe88229afb1d9ca3ad856a94ccb359d8115b2efae3b4af33f3bfb4e92487"} Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.680419 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=19.680396503 podStartE2EDuration="19.680396503s" podCreationTimestamp="2025-12-10 07:08:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:14.678106708 +0000 UTC m=+1274.404772024" watchObservedRunningTime="2025-12-10 07:09:14.680396503 +0000 UTC m=+1274.407061819" Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.686558 4765 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/neutron-794648c68d-ljskq" event={"ID":"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f","Type":"ContainerStarted","Data":"e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a"} Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.687061 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-794648c68d-ljskq" event={"ID":"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f","Type":"ContainerStarted","Data":"06c54fb988dcd93b7882a5423a7449834a98f17281e6700abdda1662c46b06c8"} Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.698835 4765 generic.go:334] "Generic (PLEG): container finished" podID="9909c4a1-0c26-4db9-8f34-3e14ad438864" containerID="8ba9684d3435246fb54936824b9d524ed419d26cebfcd4b10924f5ef83e48f06" exitCode=0 Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.698905 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" event={"ID":"9909c4a1-0c26-4db9-8f34-3e14ad438864","Type":"ContainerDied","Data":"8ba9684d3435246fb54936824b9d524ed419d26cebfcd4b10924f5ef83e48f06"} Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.698940 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" event={"ID":"9909c4a1-0c26-4db9-8f34-3e14ad438864","Type":"ContainerStarted","Data":"4917e027ab6c544f5c4df83c4be3cef81ad6d1c3fa4c06641c9949cf7cd8de18"} Dec 10 07:09:14 crc kubenswrapper[4765]: I1210 07:09:14.710295 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=13.710260561 podStartE2EDuration="13.710260561s" podCreationTimestamp="2025-12-10 07:09:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:14.704408525 +0000 UTC m=+1274.431073851" watchObservedRunningTime="2025-12-10 07:09:14.710260561 +0000 UTC m=+1274.436925877" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.705516 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-654b8cdb7c-84l5p"] Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.707887 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.717829 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.718560 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.721298 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-654b8cdb7c-84l5p"] Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.820911 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-combined-ca-bundle\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.821074 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-internal-tls-certs\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.821142 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-ovndb-tls-certs\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.821209 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5dzm\" (UniqueName: \"kubernetes.io/projected/4e9d4a75-10e4-46dd-9180-821c917a2b5e-kube-api-access-n5dzm\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.821401 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-public-tls-certs\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.821564 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-config\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.821697 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-httpd-config\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.922891 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-config\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.922954 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-httpd-config\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.922986 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-combined-ca-bundle\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.923016 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-internal-tls-certs\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.923043 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-ovndb-tls-certs\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.923107 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5dzm\" (UniqueName: \"kubernetes.io/projected/4e9d4a75-10e4-46dd-9180-821c917a2b5e-kube-api-access-n5dzm\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.923185 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-public-tls-certs\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.948032 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.948140 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.958644 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-httpd-config\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.961308 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-config\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " 
pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.961922 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-public-tls-certs\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.966959 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-combined-ca-bundle\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.967550 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5dzm\" (UniqueName: \"kubernetes.io/projected/4e9d4a75-10e4-46dd-9180-821c917a2b5e-kube-api-access-n5dzm\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:15 crc kubenswrapper[4765]: I1210 07:09:15.968889 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-internal-tls-certs\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.000553 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-ovndb-tls-certs\") pod \"neutron-654b8cdb7c-84l5p\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.024027 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.063992 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.117621 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.347516 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-wlh48" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.436507 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-scripts\") pod \"182a289b-2c34-48a5-975d-94eccdf449fe\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.436995 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/182a289b-2c34-48a5-975d-94eccdf449fe-logs\") pod \"182a289b-2c34-48a5-975d-94eccdf449fe\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.437080 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntjc2\" (UniqueName: \"kubernetes.io/projected/182a289b-2c34-48a5-975d-94eccdf449fe-kube-api-access-ntjc2\") pod \"182a289b-2c34-48a5-975d-94eccdf449fe\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.437200 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-config-data\") pod \"182a289b-2c34-48a5-975d-94eccdf449fe\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.437234 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-combined-ca-bundle\") pod \"182a289b-2c34-48a5-975d-94eccdf449fe\" (UID: \"182a289b-2c34-48a5-975d-94eccdf449fe\") " Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.438619 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/182a289b-2c34-48a5-975d-94eccdf449fe-logs" (OuterVolumeSpecName: "logs") pod "182a289b-2c34-48a5-975d-94eccdf449fe" (UID: "182a289b-2c34-48a5-975d-94eccdf449fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.458518 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/182a289b-2c34-48a5-975d-94eccdf449fe-kube-api-access-ntjc2" (OuterVolumeSpecName: "kube-api-access-ntjc2") pod "182a289b-2c34-48a5-975d-94eccdf449fe" (UID: "182a289b-2c34-48a5-975d-94eccdf449fe"). InnerVolumeSpecName "kube-api-access-ntjc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.458561 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-scripts" (OuterVolumeSpecName: "scripts") pod "182a289b-2c34-48a5-975d-94eccdf449fe" (UID: "182a289b-2c34-48a5-975d-94eccdf449fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.481892 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "182a289b-2c34-48a5-975d-94eccdf449fe" (UID: "182a289b-2c34-48a5-975d-94eccdf449fe"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.506803 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-config-data" (OuterVolumeSpecName: "config-data") pod "182a289b-2c34-48a5-975d-94eccdf449fe" (UID: "182a289b-2c34-48a5-975d-94eccdf449fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.538873 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.538904 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.538915 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/182a289b-2c34-48a5-975d-94eccdf449fe-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.538923 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/182a289b-2c34-48a5-975d-94eccdf449fe-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.538932 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntjc2\" (UniqueName: \"kubernetes.io/projected/182a289b-2c34-48a5-975d-94eccdf449fe-kube-api-access-ntjc2\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.781106 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-wlh48" event={"ID":"182a289b-2c34-48a5-975d-94eccdf449fe","Type":"ContainerDied","Data":"c0a1e078075f53840c53422576f40ae63b5b0210472318d3ade80df382077dca"} Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.781179 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0a1e078075f53840c53422576f40ae63b5b0210472318d3ade80df382077dca" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.781274 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-wlh48" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.788044 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0"} Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.818690 4765 generic.go:334] "Generic (PLEG): container finished" podID="6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" containerID="ac3689098ac38d2b4d8268641e0b2632892fba929773559138ff9e1b54c7ea38" exitCode=0 Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.819061 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dsf6c" event={"ID":"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2","Type":"ContainerDied","Data":"ac3689098ac38d2b4d8268641e0b2632892fba929773559138ff9e1b54c7ea38"} Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.892451 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-794648c68d-ljskq" event={"ID":"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f","Type":"ContainerStarted","Data":"c850c3922852a2096a591eb3bb5de7aa04e687acbf76ab0e6f10260559ce7766"} Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.894169 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.940252 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" event={"ID":"9909c4a1-0c26-4db9-8f34-3e14ad438864","Type":"ContainerStarted","Data":"350a7a37130177d4525343650de8cb2208f3925f7e5193aae5d150e8546d7bb4"} Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.940297 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.940316 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 10 07:09:16 crc kubenswrapper[4765]: I1210 07:09:16.941492 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:16.997497 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-794648c68d-ljskq" podStartSLOduration=4.997471309 podStartE2EDuration="4.997471309s" podCreationTimestamp="2025-12-10 07:09:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:16.984168012 +0000 UTC m=+1276.710833318" watchObservedRunningTime="2025-12-10 07:09:16.997471309 +0000 UTC m=+1276.724136625" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.028187 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-75b8c6446d-lf487"] Dec 10 07:09:17 crc kubenswrapper[4765]: E1210 07:09:17.028632 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="182a289b-2c34-48a5-975d-94eccdf449fe" containerName="placement-db-sync" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.028645 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="182a289b-2c34-48a5-975d-94eccdf449fe" containerName="placement-db-sync" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.028854 4765 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="182a289b-2c34-48a5-975d-94eccdf449fe" containerName="placement-db-sync" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.029900 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.036963 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.037314 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.037603 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.037669 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.039298 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-zvbg6" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.046599 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-75b8c6446d-lf487"] Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.048949 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" podStartSLOduration=5.04892962 podStartE2EDuration="5.04892962s" podCreationTimestamp="2025-12-10 07:09:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:17.023671743 +0000 UTC m=+1276.750337059" watchObservedRunningTime="2025-12-10 07:09:17.04892962 +0000 UTC m=+1276.775594936" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.104792 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-654b8cdb7c-84l5p"] Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.182377 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-combined-ca-bundle\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.182443 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7whh5\" (UniqueName: \"kubernetes.io/projected/5cbf2f96-d196-413b-841a-9b753e6beae2-kube-api-access-7whh5\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.182473 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cbf2f96-d196-413b-841a-9b753e6beae2-logs\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.182498 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-scripts\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " 
pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.182559 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-internal-tls-certs\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.182591 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-config-data\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.182629 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-public-tls-certs\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.285618 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-internal-tls-certs\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.285839 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-config-data\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.285933 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-public-tls-certs\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.286025 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-combined-ca-bundle\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.286095 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7whh5\" (UniqueName: \"kubernetes.io/projected/5cbf2f96-d196-413b-841a-9b753e6beae2-kube-api-access-7whh5\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.286129 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cbf2f96-d196-413b-841a-9b753e6beae2-logs\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " 
pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.286172 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-scripts\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.289003 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cbf2f96-d196-413b-841a-9b753e6beae2-logs\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.292430 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-scripts\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.292964 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-public-tls-certs\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.296572 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-internal-tls-certs\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.297295 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-combined-ca-bundle\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.298378 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-config-data\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.310846 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7whh5\" (UniqueName: \"kubernetes.io/projected/5cbf2f96-d196-413b-841a-9b753e6beae2-kube-api-access-7whh5\") pod \"placement-75b8c6446d-lf487\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.375868 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.951410 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-794648c68d-ljskq_cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/neutron-httpd/0.log" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.952373 4765 generic.go:334] "Generic (PLEG): container finished" podID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerID="c850c3922852a2096a591eb3bb5de7aa04e687acbf76ab0e6f10260559ce7766" exitCode=1 Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.952426 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-794648c68d-ljskq" event={"ID":"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f","Type":"ContainerDied","Data":"c850c3922852a2096a591eb3bb5de7aa04e687acbf76ab0e6f10260559ce7766"} Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.955831 4765 scope.go:117] "RemoveContainer" containerID="c850c3922852a2096a591eb3bb5de7aa04e687acbf76ab0e6f10260559ce7766" Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.968871 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d"} Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.968948 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52"} Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.974291 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-75b8c6446d-lf487"] Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.977395 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-654b8cdb7c-84l5p" event={"ID":"4e9d4a75-10e4-46dd-9180-821c917a2b5e","Type":"ContainerStarted","Data":"2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24"} Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.977448 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-654b8cdb7c-84l5p" event={"ID":"4e9d4a75-10e4-46dd-9180-821c917a2b5e","Type":"ContainerStarted","Data":"1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4"} Dec 10 07:09:17 crc kubenswrapper[4765]: I1210 07:09:17.977467 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-654b8cdb7c-84l5p" event={"ID":"4e9d4a75-10e4-46dd-9180-821c917a2b5e","Type":"ContainerStarted","Data":"e1a1b8adee1854c555d369972772b8dd5cfab049835af6ed630ec9a3f7573dd0"} Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.032966 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-654b8cdb7c-84l5p" podStartSLOduration=3.03294428 podStartE2EDuration="3.03294428s" podCreationTimestamp="2025-12-10 07:09:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:18.021478704 +0000 UTC m=+1277.748144020" watchObservedRunningTime="2025-12-10 07:09:18.03294428 +0000 UTC m=+1277.759609596" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.480854 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.632767 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-config-data\") pod \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.632829 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-credential-keys\") pod \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.632935 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-scripts\") pod \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.632976 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8flz8\" (UniqueName: \"kubernetes.io/projected/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-kube-api-access-8flz8\") pod \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.633117 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-fernet-keys\") pod \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.633180 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-combined-ca-bundle\") pod \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\" (UID: \"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2\") " Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.648240 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" (UID: "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.649790 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-scripts" (OuterVolumeSpecName: "scripts") pod "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" (UID: "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.652825 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-kube-api-access-8flz8" (OuterVolumeSpecName: "kube-api-access-8flz8") pod "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" (UID: "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2"). InnerVolumeSpecName "kube-api-access-8flz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.653254 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" (UID: "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.669497 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" (UID: "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.693158 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-config-data" (OuterVolumeSpecName: "config-data") pod "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" (UID: "6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.735836 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.735877 4765 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.735888 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.735903 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8flz8\" (UniqueName: \"kubernetes.io/projected/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-kube-api-access-8flz8\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.735915 4765 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:18 crc kubenswrapper[4765]: I1210 07:09:18.735924 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.000791 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-586f9fc866-46mgw"] Dec 10 07:09:19 crc kubenswrapper[4765]: E1210 07:09:19.001277 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" containerName="keystone-bootstrap" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.001294 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" containerName="keystone-bootstrap" Dec 10 07:09:19 crc kubenswrapper[4765]: 
I1210 07:09:19.001493 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" containerName="keystone-bootstrap" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.002233 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b"} Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.003228 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.006343 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75b8c6446d-lf487" event={"ID":"5cbf2f96-d196-413b-841a-9b753e6beae2","Type":"ContainerStarted","Data":"ce9ab5a6f376ea6cb9dea6d80fddc10af613352a67ef7f387fe0f4517b12d4bd"} Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.012426 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.028970 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.029147 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-586f9fc866-46mgw"] Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.029229 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.029251 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dsf6c" event={"ID":"6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2","Type":"ContainerDied","Data":"7f7f067b82553823093852f28ab985c590968275a535b58c6f915b31ecdbe22d"} Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.029285 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f7f067b82553823093852f28ab985c590968275a535b58c6f915b31ecdbe22d" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.029690 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dsf6c" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.156666 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-fernet-keys\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.156844 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-combined-ca-bundle\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.157294 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-scripts\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.157330 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-internal-tls-certs\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.157356 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lp226\" (UniqueName: \"kubernetes.io/projected/857ecb8b-72dc-40a7-a407-d85c40c40bcf-kube-api-access-lp226\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.157464 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-credential-keys\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.157530 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-config-data\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.157661 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-public-tls-certs\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.260002 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-credential-keys\") pod 
\"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.260117 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-config-data\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.260166 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-public-tls-certs\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.260220 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-fernet-keys\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.260254 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-combined-ca-bundle\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.260358 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-scripts\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.260383 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-internal-tls-certs\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.260413 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lp226\" (UniqueName: \"kubernetes.io/projected/857ecb8b-72dc-40a7-a407-d85c40c40bcf-kube-api-access-lp226\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.265652 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-scripts\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.266287 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-public-tls-certs\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc 
kubenswrapper[4765]: I1210 07:09:19.267610 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-credential-keys\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.269620 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-config-data\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.269639 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-internal-tls-certs\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.270466 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-combined-ca-bundle\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.276202 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-fernet-keys\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.279906 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lp226\" (UniqueName: \"kubernetes.io/projected/857ecb8b-72dc-40a7-a407-d85c40c40bcf-kube-api-access-lp226\") pod \"keystone-586f9fc866-46mgw\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") " pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:19 crc kubenswrapper[4765]: I1210 07:09:19.380800 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:20 crc kubenswrapper[4765]: I1210 07:09:20.080683 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75b8c6446d-lf487" event={"ID":"5cbf2f96-d196-413b-841a-9b753e6beae2","Type":"ContainerStarted","Data":"0ec9efaecee0820558e3dfac5cce87e629fece52ce9474ad4a6e8484c9d7a6fb"} Dec 10 07:09:20 crc kubenswrapper[4765]: I1210 07:09:20.123260 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-794648c68d-ljskq_cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/neutron-httpd/0.log" Dec 10 07:09:20 crc kubenswrapper[4765]: I1210 07:09:20.124256 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-794648c68d-ljskq" event={"ID":"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f","Type":"ContainerStarted","Data":"2a7fe746dd0cd6907687a4c13d8002851f97a21e2e6d44b100c7473a80e2ba98"} Dec 10 07:09:20 crc kubenswrapper[4765]: I1210 07:09:20.124976 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:20 crc kubenswrapper[4765]: I1210 07:09:20.141553 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-586f9fc866-46mgw"] Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.213545 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-794648c68d-ljskq_cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/neutron-httpd/1.log" Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.216887 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-794648c68d-ljskq_cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/neutron-httpd/0.log" Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.220317 4765 generic.go:334] "Generic (PLEG): container finished" podID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerID="2a7fe746dd0cd6907687a4c13d8002851f97a21e2e6d44b100c7473a80e2ba98" exitCode=1 Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.220391 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-794648c68d-ljskq" event={"ID":"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f","Type":"ContainerDied","Data":"2a7fe746dd0cd6907687a4c13d8002851f97a21e2e6d44b100c7473a80e2ba98"} Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.220446 4765 scope.go:117] "RemoveContainer" containerID="c850c3922852a2096a591eb3bb5de7aa04e687acbf76ab0e6f10260559ce7766" Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.222778 4765 scope.go:117] "RemoveContainer" containerID="2a7fe746dd0cd6907687a4c13d8002851f97a21e2e6d44b100c7473a80e2ba98" Dec 10 07:09:21 crc kubenswrapper[4765]: E1210 07:09:21.223079 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"neutron-httpd\" with CrashLoopBackOff: \"back-off 10s restarting failed container=neutron-httpd pod=neutron-794648c68d-ljskq_openstack(cc86d6d5-ff4f-4d50-9587-b6661e7ac16f)\"" pod="openstack/neutron-794648c68d-ljskq" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.404530 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.407543 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.750325 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/glance-default-external-api-0" Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.750403 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.799510 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 10 07:09:21 crc kubenswrapper[4765]: I1210 07:09:21.804019 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 10 07:09:22 crc kubenswrapper[4765]: I1210 07:09:22.232904 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 10 07:09:22 crc kubenswrapper[4765]: I1210 07:09:22.232962 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 10 07:09:22 crc kubenswrapper[4765]: I1210 07:09:22.240577 4765 scope.go:117] "RemoveContainer" containerID="2a7fe746dd0cd6907687a4c13d8002851f97a21e2e6d44b100c7473a80e2ba98" Dec 10 07:09:22 crc kubenswrapper[4765]: E1210 07:09:22.244063 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"neutron-httpd\" with CrashLoopBackOff: \"back-off 10s restarting failed container=neutron-httpd pod=neutron-794648c68d-ljskq_openstack(cc86d6d5-ff4f-4d50-9587-b6661e7ac16f)\"" pod="openstack/neutron-794648c68d-ljskq" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" Dec 10 07:09:23 crc kubenswrapper[4765]: I1210 07:09:23.021228 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:23 crc kubenswrapper[4765]: I1210 07:09:23.087390 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74c95c887-pbmt2"] Dec 10 07:09:23 crc kubenswrapper[4765]: I1210 07:09:23.087650 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" podUID="fc44a504-e791-44c7-afc8-77d11d77b7a4" containerName="dnsmasq-dns" containerID="cri-o://fcb34f95dac424c94080257451e042ae316feded1c01ee570341b28ae870a57d" gracePeriod=10 Dec 10 07:09:23 crc kubenswrapper[4765]: I1210 07:09:23.258872 4765 generic.go:334] "Generic (PLEG): container finished" podID="fc44a504-e791-44c7-afc8-77d11d77b7a4" containerID="fcb34f95dac424c94080257451e042ae316feded1c01ee570341b28ae870a57d" exitCode=0 Dec 10 07:09:23 crc kubenswrapper[4765]: I1210 07:09:23.259176 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" event={"ID":"fc44a504-e791-44c7-afc8-77d11d77b7a4","Type":"ContainerDied","Data":"fcb34f95dac424c94080257451e042ae316feded1c01ee570341b28ae870a57d"} Dec 10 07:09:24 crc kubenswrapper[4765]: I1210 07:09:24.868212 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 10 07:09:24 crc kubenswrapper[4765]: I1210 07:09:24.869253 4765 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 07:09:24 crc kubenswrapper[4765]: I1210 07:09:24.940302 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 10 07:09:25 crc kubenswrapper[4765]: W1210 07:09:25.658023 4765 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod857ecb8b_72dc_40a7_a407_d85c40c40bcf.slice/crio-4c350804e5a686bb826e970e780fb941f9e301cf2128273ca21fedd769bb5ee6 WatchSource:0}: Error finding container 4c350804e5a686bb826e970e780fb941f9e301cf2128273ca21fedd769bb5ee6: Status 404 returned error can't find the container with id 4c350804e5a686bb826e970e780fb941f9e301cf2128273ca21fedd769bb5ee6
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.317493 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74c95c887-pbmt2"
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.324199 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-794648c68d-ljskq_cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/neutron-httpd/1.log"
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.350281 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-586f9fc866-46mgw" event={"ID":"857ecb8b-72dc-40a7-a407-d85c40c40bcf","Type":"ContainerStarted","Data":"97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752"}
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.350341 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-586f9fc866-46mgw" event={"ID":"857ecb8b-72dc-40a7-a407-d85c40c40bcf","Type":"ContainerStarted","Data":"4c350804e5a686bb826e970e780fb941f9e301cf2128273ca21fedd769bb5ee6"}
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.350460 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-586f9fc866-46mgw"
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.409468 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" event={"ID":"fc44a504-e791-44c7-afc8-77d11d77b7a4","Type":"ContainerDied","Data":"b4ab5626ede4fd84840437858a78efe21a655c6f6dd6e125a2714aba6fddcc0c"}
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.409789 4765 scope.go:117] "RemoveContainer" containerID="fcb34f95dac424c94080257451e042ae316feded1c01ee570341b28ae870a57d"
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.410051 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74c95c887-pbmt2"
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.454421 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-586f9fc866-46mgw" podStartSLOduration=8.454393058 podStartE2EDuration="8.454393058s" podCreationTimestamp="2025-12-10 07:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:26.451115535 +0000 UTC m=+1286.177780871" watchObservedRunningTime="2025-12-10 07:09:26.454393058 +0000 UTC m=+1286.181058404"
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.528805 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvzh6\" (UniqueName: \"kubernetes.io/projected/fc44a504-e791-44c7-afc8-77d11d77b7a4-kube-api-access-cvzh6\") pod \"fc44a504-e791-44c7-afc8-77d11d77b7a4\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") "
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.529139 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-dns-svc\") pod \"fc44a504-e791-44c7-afc8-77d11d77b7a4\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") "
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.529414 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-config\") pod \"fc44a504-e791-44c7-afc8-77d11d77b7a4\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") "
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.529506 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-nb\") pod \"fc44a504-e791-44c7-afc8-77d11d77b7a4\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") "
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.529598 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-sb\") pod \"fc44a504-e791-44c7-afc8-77d11d77b7a4\" (UID: \"fc44a504-e791-44c7-afc8-77d11d77b7a4\") "
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.556844 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc44a504-e791-44c7-afc8-77d11d77b7a4-kube-api-access-cvzh6" (OuterVolumeSpecName: "kube-api-access-cvzh6") pod "fc44a504-e791-44c7-afc8-77d11d77b7a4" (UID: "fc44a504-e791-44c7-afc8-77d11d77b7a4"). InnerVolumeSpecName "kube-api-access-cvzh6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.632040 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvzh6\" (UniqueName: \"kubernetes.io/projected/fc44a504-e791-44c7-afc8-77d11d77b7a4-kube-api-access-cvzh6\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.645078 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-config" (OuterVolumeSpecName: "config") pod "fc44a504-e791-44c7-afc8-77d11d77b7a4" (UID: "fc44a504-e791-44c7-afc8-77d11d77b7a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.645135 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fc44a504-e791-44c7-afc8-77d11d77b7a4" (UID: "fc44a504-e791-44c7-afc8-77d11d77b7a4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.653657 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fc44a504-e791-44c7-afc8-77d11d77b7a4" (UID: "fc44a504-e791-44c7-afc8-77d11d77b7a4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.660487 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fc44a504-e791-44c7-afc8-77d11d77b7a4" (UID: "fc44a504-e791-44c7-afc8-77d11d77b7a4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.734652 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-config\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.734717 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.734734 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.734747 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fc44a504-e791-44c7-afc8-77d11d77b7a4-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.779136 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74c95c887-pbmt2"]
Dec 10 07:09:26 crc kubenswrapper[4765]: I1210 07:09:26.787811 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74c95c887-pbmt2"]
Dec 10 07:09:27 crc kubenswrapper[4765]: I1210 07:09:27.043783 4765 scope.go:117] "RemoveContainer" containerID="c78fba49464f6ea13dc04f9a4d0e76a90b7234c09df7beef6f3d8d29f5e5ba94"
Dec 10 07:09:27 crc kubenswrapper[4765]: I1210 07:09:27.428456 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2tnf4" event={"ID":"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb","Type":"ContainerStarted","Data":"bb23aa896aeaa13a3a3aad41ff5bdd9ccdddda01059437a0f632a5e3836f4ccb"}
Dec 10 07:09:27 crc kubenswrapper[4765]: I1210 07:09:27.433172 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a408295-9dcc-4bde-8f4c-019bc7585479","Type":"ContainerStarted","Data":"a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0"}
Dec 10 07:09:27 crc kubenswrapper[4765]: I1210 07:09:27.455776 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-2tnf4" podStartSLOduration=3.415498447 podStartE2EDuration="42.45574251s" podCreationTimestamp="2025-12-10 07:08:45 +0000 UTC" firstStartedPulling="2025-12-10 07:08:46.995870503 +0000 UTC m=+1246.722535819" lastFinishedPulling="2025-12-10 07:09:26.036114566 +0000 UTC m=+1285.762779882" observedRunningTime="2025-12-10 07:09:27.454606947 +0000 UTC m=+1287.181272263" watchObservedRunningTime="2025-12-10 07:09:27.45574251 +0000 UTC m=+1287.182407826"
Dec 10 07:09:27 crc kubenswrapper[4765]: I1210 07:09:27.484591 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f"}
Dec 10 07:09:27 crc kubenswrapper[4765]: I1210 07:09:27.494736 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75b8c6446d-lf487" event={"ID":"5cbf2f96-d196-413b-841a-9b753e6beae2","Type":"ContainerStarted","Data":"782ba5c041f2d0683f31e70ec7c7cd0a1637f3847ec96fe8a10620030fff50a0"}
Dec 10 07:09:27 crc kubenswrapper[4765]: I1210 07:09:27.495490 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-75b8c6446d-lf487"
Dec 10 07:09:27 crc kubenswrapper[4765]: I1210 07:09:27.497468 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-75b8c6446d-lf487"
Dec 10 07:09:27 crc kubenswrapper[4765]: I1210 07:09:27.526076 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-75b8c6446d-lf487" podStartSLOduration=11.526051965 podStartE2EDuration="11.526051965s" podCreationTimestamp="2025-12-10 07:09:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:27.525380206 +0000 UTC m=+1287.252045552" watchObservedRunningTime="2025-12-10 07:09:27.526051965 +0000 UTC m=+1287.252717281"
Dec 10 07:09:28 crc kubenswrapper[4765]: I1210 07:09:28.511498 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bxqxj" event={"ID":"46580548-0bb0-4026-821b-2ee72fc56f70","Type":"ContainerStarted","Data":"e5112d7593a89cdc84e6fce91dc43492c0ab4b3e753938520601247a72b74152"}
Dec 10 07:09:28 crc kubenswrapper[4765]: I1210 07:09:28.526902 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73"}
Dec 10 07:09:28 crc kubenswrapper[4765]: I1210 07:09:28.526967 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9"}
Dec 10 07:09:28 crc kubenswrapper[4765]: I1210 07:09:28.526983 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e"}
Dec 10 07:09:28 crc kubenswrapper[4765]: I1210 07:09:28.549302 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-bxqxj" podStartSLOduration=5.141555815 podStartE2EDuration="44.549263408s" podCreationTimestamp="2025-12-10 07:08:44 +0000 UTC" firstStartedPulling="2025-12-10 07:08:46.626582031 +0000 UTC m=+1246.353247347" lastFinishedPulling="2025-12-10 07:09:26.034289634 +0000 UTC m=+1285.760954940" observedRunningTime="2025-12-10 07:09:28.539072858 +0000 UTC m=+1288.265738174" watchObservedRunningTime="2025-12-10 07:09:28.549263408 +0000 UTC m=+1288.275928724"
Dec 10 07:09:28 crc kubenswrapper[4765]: I1210 07:09:28.600827 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc44a504-e791-44c7-afc8-77d11d77b7a4" path="/var/lib/kubelet/pods/fc44a504-e791-44c7-afc8-77d11d77b7a4/volumes"
Dec 10 07:09:28 crc kubenswrapper[4765]: I1210 07:09:28.809038 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-75b8c6446d-lf487"
Dec 10 07:09:30 crc kubenswrapper[4765]: I1210 07:09:30.591306 4765 generic.go:334] "Generic (PLEG): container finished" podID="c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb" containerID="bb23aa896aeaa13a3a3aad41ff5bdd9ccdddda01059437a0f632a5e3836f4ccb" exitCode=0
Dec 10 07:09:30 crc kubenswrapper[4765]: I1210 07:09:30.598710 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2tnf4" event={"ID":"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb","Type":"ContainerDied","Data":"bb23aa896aeaa13a3a3aad41ff5bdd9ccdddda01059437a0f632a5e3836f4ccb"}
Dec 10 07:09:30 crc kubenswrapper[4765]: I1210 07:09:30.605368 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f"}
Dec 10 07:09:30 crc kubenswrapper[4765]: I1210 07:09:30.605415 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a"}
Dec 10 07:09:30 crc kubenswrapper[4765]: I1210 07:09:30.605430 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63"}
Dec 10 07:09:30 crc kubenswrapper[4765]: I1210 07:09:30.605478 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae"}
Dec 10 07:09:31 crc kubenswrapper[4765]: I1210 07:09:31.121847 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74c95c887-pbmt2" podUID="fc44a504-e791-44c7-afc8-77d11d77b7a4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.144:5353: i/o timeout"
Dec 10 07:09:31 crc kubenswrapper[4765]: I1210 07:09:31.629302 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101"}
Dec 10 07:09:33 crc kubenswrapper[4765]: I1210 07:09:33.660435 4765 generic.go:334] "Generic (PLEG): container finished" podID="46580548-0bb0-4026-821b-2ee72fc56f70" containerID="e5112d7593a89cdc84e6fce91dc43492c0ab4b3e753938520601247a72b74152" exitCode=0
Dec 10 07:09:33 crc kubenswrapper[4765]: I1210 07:09:33.660506 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bxqxj" event={"ID":"46580548-0bb0-4026-821b-2ee72fc56f70","Type":"ContainerDied","Data":"e5112d7593a89cdc84e6fce91dc43492c0ab4b3e753938520601247a72b74152"}
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.323821 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2tnf4"
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.435182 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mk92c\" (UniqueName: \"kubernetes.io/projected/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-kube-api-access-mk92c\") pod \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") "
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.435309 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-db-sync-config-data\") pod \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") "
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.435599 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-combined-ca-bundle\") pod \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\" (UID: \"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb\") "
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.444191 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb" (UID: "c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.449131 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-kube-api-access-mk92c" (OuterVolumeSpecName: "kube-api-access-mk92c") pod "c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb" (UID: "c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb"). InnerVolumeSpecName "kube-api-access-mk92c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.471382 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb" (UID: "c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.538496 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.538546 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mk92c\" (UniqueName: \"kubernetes.io/projected/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-kube-api-access-mk92c\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.538564 4765 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.676140 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2tnf4" event={"ID":"c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb","Type":"ContainerDied","Data":"46e375e7e6d448485079afc2eaebe068ba737e8a04601aeb456cd6ab1e9cf8db"}
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.676209 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46e375e7e6d448485079afc2eaebe068ba737e8a04601aeb456cd6ab1e9cf8db"
Dec 10 07:09:34 crc kubenswrapper[4765]: I1210 07:09:34.676357 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2tnf4"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.007410 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.050254 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-scripts\") pod \"46580548-0bb0-4026-821b-2ee72fc56f70\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") "
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.050340 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-db-sync-config-data\") pod \"46580548-0bb0-4026-821b-2ee72fc56f70\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") "
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.050385 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-combined-ca-bundle\") pod \"46580548-0bb0-4026-821b-2ee72fc56f70\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") "
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.059688 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "46580548-0bb0-4026-821b-2ee72fc56f70" (UID: "46580548-0bb0-4026-821b-2ee72fc56f70"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.064452 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-scripts" (OuterVolumeSpecName: "scripts") pod "46580548-0bb0-4026-821b-2ee72fc56f70" (UID: "46580548-0bb0-4026-821b-2ee72fc56f70"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.084484 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46580548-0bb0-4026-821b-2ee72fc56f70" (UID: "46580548-0bb0-4026-821b-2ee72fc56f70"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.151876 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/46580548-0bb0-4026-821b-2ee72fc56f70-etc-machine-id\") pod \"46580548-0bb0-4026-821b-2ee72fc56f70\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") "
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.151987 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-config-data\") pod \"46580548-0bb0-4026-821b-2ee72fc56f70\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") "
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.152021 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ml6kc\" (UniqueName: \"kubernetes.io/projected/46580548-0bb0-4026-821b-2ee72fc56f70-kube-api-access-ml6kc\") pod \"46580548-0bb0-4026-821b-2ee72fc56f70\" (UID: \"46580548-0bb0-4026-821b-2ee72fc56f70\") "
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.152631 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.152650 4765 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.152664 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.153212 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46580548-0bb0-4026-821b-2ee72fc56f70-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "46580548-0bb0-4026-821b-2ee72fc56f70" (UID: "46580548-0bb0-4026-821b-2ee72fc56f70"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.158008 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46580548-0bb0-4026-821b-2ee72fc56f70-kube-api-access-ml6kc" (OuterVolumeSpecName: "kube-api-access-ml6kc") pod "46580548-0bb0-4026-821b-2ee72fc56f70" (UID: "46580548-0bb0-4026-821b-2ee72fc56f70"). InnerVolumeSpecName "kube-api-access-ml6kc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.207047 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-config-data" (OuterVolumeSpecName: "config-data") pod "46580548-0bb0-4026-821b-2ee72fc56f70" (UID: "46580548-0bb0-4026-821b-2ee72fc56f70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.254959 4765 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/46580548-0bb0-4026-821b-2ee72fc56f70-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.255000 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46580548-0bb0-4026-821b-2ee72fc56f70-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.255018 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml6kc\" (UniqueName: \"kubernetes.io/projected/46580548-0bb0-4026-821b-2ee72fc56f70-kube-api-access-ml6kc\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.647318 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-66fcc55b75-8hcl6"]
Dec 10 07:09:35 crc kubenswrapper[4765]: E1210 07:09:35.647718 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc44a504-e791-44c7-afc8-77d11d77b7a4" containerName="init"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.647734 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc44a504-e791-44c7-afc8-77d11d77b7a4" containerName="init"
Dec 10 07:09:35 crc kubenswrapper[4765]: E1210 07:09:35.647764 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb" containerName="barbican-db-sync"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.647770 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb" containerName="barbican-db-sync"
Dec 10 07:09:35 crc kubenswrapper[4765]: E1210 07:09:35.647788 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46580548-0bb0-4026-821b-2ee72fc56f70" containerName="cinder-db-sync"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.647794 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="46580548-0bb0-4026-821b-2ee72fc56f70" containerName="cinder-db-sync"
Dec 10 07:09:35 crc kubenswrapper[4765]: E1210 07:09:35.647808 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc44a504-e791-44c7-afc8-77d11d77b7a4" containerName="dnsmasq-dns"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.647814 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc44a504-e791-44c7-afc8-77d11d77b7a4" containerName="dnsmasq-dns"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.647995 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="46580548-0bb0-4026-821b-2ee72fc56f70" containerName="cinder-db-sync"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.648014 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc44a504-e791-44c7-afc8-77d11d77b7a4" containerName="dnsmasq-dns"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.648026 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb" containerName="barbican-db-sync"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.649178 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.652682 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.652922 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-cc9wl"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.657431 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.663951 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-66fcc55b75-8hcl6"]
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.667442 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data-custom\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.667493 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-combined-ca-bundle\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.667519 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/865e0a69-0d85-4d93-9d38-f52449d09d87-logs\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.667540 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77nlk\" (UniqueName: \"kubernetes.io/projected/865e0a69-0d85-4d93-9d38-f52449d09d87-kube-api-access-77nlk\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.667571 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.715029 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84d886847-svlv6"]
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.715825 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="ceilometer-central-agent" containerID="cri-o://9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4" gracePeriod=30
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.716224 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="proxy-httpd" containerID="cri-o://3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60" gracePeriod=30
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.716301 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="sg-core" containerID="cri-o://a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0" gracePeriod=30
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.716330 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="ceilometer-notification-agent" containerID="cri-o://3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91" gracePeriod=30
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.717612 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.717655 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a408295-9dcc-4bde-8f4c-019bc7585479","Type":"ContainerStarted","Data":"3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60"}
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.717813 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.745157 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7756b4f44b-9575x"]
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.747144 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.750637 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.753277 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049"}
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.753315 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerStarted","Data":"39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2"}
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.758251 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bxqxj" event={"ID":"46580548-0bb0-4026-821b-2ee72fc56f70","Type":"ContainerDied","Data":"7018405f7fabdcffe58b847b6244611a4a7ff50430b347b085d4db5014961a15"}
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.758292 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7018405f7fabdcffe58b847b6244611a4a7ff50430b347b085d4db5014961a15"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.758354 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-bxqxj"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.770592 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data-custom\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.770644 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-combined-ca-bundle\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.770673 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/865e0a69-0d85-4d93-9d38-f52449d09d87-logs\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.770694 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77nlk\" (UniqueName: \"kubernetes.io/projected/865e0a69-0d85-4d93-9d38-f52449d09d87-kube-api-access-77nlk\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.770730 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.771699 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/865e0a69-0d85-4d93-9d38-f52449d09d87-logs\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.776876 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.796875 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-combined-ca-bundle\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.801012 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data-custom\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.822887 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77nlk\" (UniqueName: \"kubernetes.io/projected/865e0a69-0d85-4d93-9d38-f52449d09d87-kube-api-access-77nlk\") pod \"barbican-worker-66fcc55b75-8hcl6\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") " pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.872985 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-nb\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.873097 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.873130 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt8jg\" (UniqueName: \"kubernetes.io/projected/367a49cf-488a-4852-8728-78dacbfbd500-kube-api-access-nt8jg\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.873223 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfv9h\" (UniqueName: \"kubernetes.io/projected/60534b9b-5f03-4d05-91ff-68e6f141ecc3-kube-api-access-cfv9h\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.873272 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data-custom\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.873302 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-combined-ca-bundle\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.873331 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-sb\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.873375 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/367a49cf-488a-4852-8728-78dacbfbd500-logs\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.873419 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-dns-svc\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.873451 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-config\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.876509 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84d886847-svlv6"]
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.888517 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.740329934 podStartE2EDuration="51.888481669s" podCreationTimestamp="2025-12-10 07:08:44 +0000 UTC" firstStartedPulling="2025-12-10 07:08:46.7723872 +0000 UTC m=+1246.499052526" lastFinishedPulling="2025-12-10 07:09:34.920538945 +0000 UTC m=+1294.647204261" observedRunningTime="2025-12-10 07:09:35.780395901 +0000 UTC m=+1295.507061217" watchObservedRunningTime="2025-12-10 07:09:35.888481669 +0000 UTC m=+1295.615146985"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.916233 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7756b4f44b-9575x"]
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.933294 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=157.416985004 podStartE2EDuration="2m53.93325901s" podCreationTimestamp="2025-12-10 07:06:42 +0000 UTC" firstStartedPulling="2025-12-10 07:09:13.109673491 +0000 UTC m=+1272.836338807" lastFinishedPulling="2025-12-10 07:09:29.625947497 +0000 UTC m=+1289.352612813" observedRunningTime="2025-12-10 07:09:35.889676253 +0000 UTC m=+1295.616341579" watchObservedRunningTime="2025-12-10 07:09:35.93325901 +0000 UTC m=+1295.659924326"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.968471 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.974914 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-config\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.974983 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-nb\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.975031 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.975078 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt8jg\" (UniqueName: \"kubernetes.io/projected/367a49cf-488a-4852-8728-78dacbfbd500-kube-api-access-nt8jg\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.975165 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfv9h\" (UniqueName: \"kubernetes.io/projected/60534b9b-5f03-4d05-91ff-68e6f141ecc3-kube-api-access-cfv9h\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.975193 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data-custom\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.975217 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-combined-ca-bundle\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.975241 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-sb\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.975275 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/367a49cf-488a-4852-8728-78dacbfbd500-logs\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.975296 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-dns-svc\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.978567 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-config\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.978773 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-dns-svc\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.979153 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/367a49cf-488a-4852-8728-78dacbfbd500-logs\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.979304 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-nb\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.979524 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-sb\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.981420 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-679d966d56-j58pb"]
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.985560 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.989590 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data-custom\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.990232 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-combined-ca-bundle\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:35 crc kubenswrapper[4765]: I1210 07:09:35.983689 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.000033 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.006920 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfv9h\" (UniqueName: \"kubernetes.io/projected/60534b9b-5f03-4d05-91ff-68e6f141ecc3-kube-api-access-cfv9h\") pod \"dnsmasq-dns-84d886847-svlv6\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") " pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.012335 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-679d966d56-j58pb"]
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.020962 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt8jg\" (UniqueName: \"kubernetes.io/projected/367a49cf-488a-4852-8728-78dacbfbd500-kube-api-access-nt8jg\") pod \"barbican-keystone-listener-7756b4f44b-9575x\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.057100 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.085187 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fthk\" (UniqueName: \"kubernetes.io/projected/d3683f1d-a6e8-4762-abad-02773d41261a-kube-api-access-6fthk\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.085282 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-combined-ca-bundle\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.085310 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.085381 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data-custom\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.085402 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3683f1d-a6e8-4762-abad-02773d41261a-logs\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.098621 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.101971 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.108423 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.108618 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.109313 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-74px2"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.109525 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.168855 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.190649 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fthk\" (UniqueName: \"kubernetes.io/projected/d3683f1d-a6e8-4762-abad-02773d41261a-kube-api-access-6fthk\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.190735 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.191902 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j6t5\" (UniqueName: \"kubernetes.io/projected/13622500-18a7-45ab-9d6f-89b94db99e1c-kube-api-access-6j6t5\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.192064 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-combined-ca-bundle\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.192127 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.194948 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.195348 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-scripts\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.195417 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/13622500-18a7-45ab-9d6f-89b94db99e1c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.195461 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data-custom\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.195497 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3683f1d-a6e8-4762-abad-02773d41261a-logs\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.195528 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.212299 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data-custom\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.216497 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3683f1d-a6e8-4762-abad-02773d41261a-logs\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.222287 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.259715 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-combined-ca-bundle\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.266461 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fthk\" (UniqueName: \"kubernetes.io/projected/d3683f1d-a6e8-4762-abad-02773d41261a-kube-api-access-6fthk\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.266714 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data\") pod \"barbican-api-679d966d56-j58pb\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.298418 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-scripts\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.320916 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/13622500-18a7-45ab-9d6f-89b94db99e1c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.321133 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.324599 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.324775 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6j6t5\" (UniqueName: \"kubernetes.io/projected/13622500-18a7-45ab-9d6f-89b94db99e1c-kube-api-access-6j6t5\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.324920 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-scripts\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.325071 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for
volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/13622500-18a7-45ab-9d6f-89b94db99e1c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.325707 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.339910 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-679d966d56-j58pb" Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.355911 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84d886847-svlv6"] Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.506110 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.508311 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j6t5\" (UniqueName: \"kubernetes.io/projected/13622500-18a7-45ab-9d6f-89b94db99e1c-kube-api-access-6j6t5\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.512576 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.537815 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.547043 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-df7c996dc-ptfjn"] Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.556021 4765 util.go:30] "No sandbox for pod can be found. 
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.591138 4765 scope.go:117] "RemoveContainer" containerID="2a7fe746dd0cd6907687a4c13d8002851f97a21e2e6d44b100c7473a80e2ba98"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.627936 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-dns-svc\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.628148 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-config\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.628189 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrwz8\" (UniqueName: \"kubernetes.io/projected/c12fa7df-5c04-4a13-82ab-ba30534e618b-kube-api-access-lrwz8\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.628365 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-sb\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.628410 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-nb\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.644402 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-df7c996dc-ptfjn"]
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.644443 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-df7c996dc-ptfjn"]
Dec 10 07:09:36 crc kubenswrapper[4765]: E1210 07:09:36.646775 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-lrwz8 ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-df7c996dc-ptfjn" podUID="c12fa7df-5c04-4a13-82ab-ba30534e618b"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.655837 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.657815 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.663956 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.686603 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.695422 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"]
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.698991 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.703152 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.703408 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"]
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.733661 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-dns-svc\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.733819 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv52b\" (UniqueName: \"kubernetes.io/projected/f0000725-1e94-4d0d-9891-2771ed36ade8-kube-api-access-qv52b\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.733883 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data-custom\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.734017 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-config\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.734071 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrwz8\" (UniqueName: \"kubernetes.io/projected/c12fa7df-5c04-4a13-82ab-ba30534e618b-kube-api-access-lrwz8\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.734206 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-sb\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.734258 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-nb\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.734287 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0000725-1e94-4d0d-9891-2771ed36ade8-logs\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.734342 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-scripts\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.734394 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.734438 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.734474 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0000725-1e94-4d0d-9891-2771ed36ade8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.735995 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-config\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.737747 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-nb\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.738307 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-sb\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.740790 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.754497 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-dns-svc\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.761501 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrwz8\" (UniqueName: \"kubernetes.io/projected/c12fa7df-5c04-4a13-82ab-ba30534e618b-kube-api-access-lrwz8\") pod \"dnsmasq-dns-df7c996dc-ptfjn\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") " pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.806222 4765 generic.go:334] "Generic (PLEG): container finished" podID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerID="3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60" exitCode=0
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.806262 4765 generic.go:334] "Generic (PLEG): container finished" podID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerID="a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0" exitCode=2
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.806319 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.806970 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a408295-9dcc-4bde-8f4c-019bc7585479","Type":"ContainerDied","Data":"3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60"}
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.807010 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a408295-9dcc-4bde-8f4c-019bc7585479","Type":"ContainerDied","Data":"a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0"}
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.830766 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.840898 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-nb\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.840946 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-config\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.840982 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xkhk\" (UniqueName: \"kubernetes.io/projected/3e62750c-eec4-43f8-afb4-8f8d8e794247-kube-api-access-5xkhk\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.841266 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0000725-1e94-4d0d-9891-2771ed36ade8-logs\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.841359 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-scripts\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.841459 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-svc\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.841503 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.841561 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.841588 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0000725-1e94-4d0d-9891-2771ed36ade8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.841672 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-sb\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.841822 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv52b\" (UniqueName: \"kubernetes.io/projected/f0000725-1e94-4d0d-9891-2771ed36ade8-kube-api-access-qv52b\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.841911 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data-custom\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.841952 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-swift-storage-0\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.842445 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0000725-1e94-4d0d-9891-2771ed36ade8-logs\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.842560 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0000725-1e94-4d0d-9891-2771ed36ade8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.850629 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-scripts\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.855169 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data-custom\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.857747 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.863460 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.876047 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv52b\" (UniqueName: \"kubernetes.io/projected/f0000725-1e94-4d0d-9891-2771ed36ade8-kube-api-access-qv52b\") pod \"cinder-api-0\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " pod="openstack/cinder-api-0"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.943320 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrwz8\" (UniqueName: \"kubernetes.io/projected/c12fa7df-5c04-4a13-82ab-ba30534e618b-kube-api-access-lrwz8\") pod \"c12fa7df-5c04-4a13-82ab-ba30534e618b\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") "
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.943991 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-dns-svc\") pod \"c12fa7df-5c04-4a13-82ab-ba30534e618b\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") "
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.946314 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-nb\") pod \"c12fa7df-5c04-4a13-82ab-ba30534e618b\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") "
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.946457 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-sb\") pod \"c12fa7df-5c04-4a13-82ab-ba30534e618b\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") "
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.946718 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-config\") pod \"c12fa7df-5c04-4a13-82ab-ba30534e618b\" (UID: \"c12fa7df-5c04-4a13-82ab-ba30534e618b\") "
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.946788 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c12fa7df-5c04-4a13-82ab-ba30534e618b" (UID: "c12fa7df-5c04-4a13-82ab-ba30534e618b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947031 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c12fa7df-5c04-4a13-82ab-ba30534e618b" (UID: "c12fa7df-5c04-4a13-82ab-ba30534e618b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947137 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-sb\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947337 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-swift-storage-0\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947391 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-nb\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947415 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-config\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947465 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xkhk\" (UniqueName: \"kubernetes.io/projected/3e62750c-eec4-43f8-afb4-8f8d8e794247-kube-api-access-5xkhk\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947576 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-config" (OuterVolumeSpecName: "config") pod "c12fa7df-5c04-4a13-82ab-ba30534e618b" (UID: "c12fa7df-5c04-4a13-82ab-ba30534e618b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947794 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-svc\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947901 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-config\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947918 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.947932 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.948340 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c12fa7df-5c04-4a13-82ab-ba30534e618b" (UID: "c12fa7df-5c04-4a13-82ab-ba30534e618b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.948572 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-config\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.948797 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-svc\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.949210 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-sb\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.949479 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-swift-storage-0\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.949746 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-nb\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.971355 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c12fa7df-5c04-4a13-82ab-ba30534e618b-kube-api-access-lrwz8" (OuterVolumeSpecName: "kube-api-access-lrwz8") pod "c12fa7df-5c04-4a13-82ab-ba30534e618b" (UID: "c12fa7df-5c04-4a13-82ab-ba30534e618b"). InnerVolumeSpecName "kube-api-access-lrwz8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.973306 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-66fcc55b75-8hcl6"]
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.975651 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xkhk\" (UniqueName: \"kubernetes.io/projected/3e62750c-eec4-43f8-afb4-8f8d8e794247-kube-api-access-5xkhk\") pod \"dnsmasq-dns-7fcdc9f4bf-cpfgn\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") " pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:36 crc kubenswrapper[4765]: I1210 07:09:36.994006 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.005989 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.050306 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84d886847-svlv6"]
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.051514 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrwz8\" (UniqueName: \"kubernetes.io/projected/c12fa7df-5c04-4a13-82ab-ba30534e618b-kube-api-access-lrwz8\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.051553 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c12fa7df-5c04-4a13-82ab-ba30534e618b-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.069910 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.202464 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7756b4f44b-9575x"]
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.226664 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-679d966d56-j58pb"]
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.466280 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.598878 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.718370 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"]
Dec 10 07:09:37 crc kubenswrapper[4765]: W1210 07:09:37.725388 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e62750c_eec4_43f8_afb4_8f8d8e794247.slice/crio-4850f5ce5db4f67f2747bd41d8a10c093188f984445e2f8caf8ff670fadc21db WatchSource:0}: Error finding container 4850f5ce5db4f67f2747bd41d8a10c093188f984445e2f8caf8ff670fadc21db: Status 404 returned error can't find the container with id 4850f5ce5db4f67f2747bd41d8a10c093188f984445e2f8caf8ff670fadc21db
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.823293 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-794648c68d-ljskq_cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/neutron-httpd/2.log"
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.823699 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-794648c68d-ljskq_cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/neutron-httpd/1.log"
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.824888 4765 generic.go:334] "Generic (PLEG): container finished" podID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerID="e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b" exitCode=1
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.824960 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-794648c68d-ljskq" event={"ID":"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f","Type":"ContainerDied","Data":"e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.824998 4765 scope.go:117] "RemoveContainer" containerID="2a7fe746dd0cd6907687a4c13d8002851f97a21e2e6d44b100c7473a80e2ba98"
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.825901 4765 scope.go:117] "RemoveContainer" containerID="e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b"
Dec 10 07:09:37 crc kubenswrapper[4765]: E1210 07:09:37.826143 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"neutron-httpd\" with CrashLoopBackOff: \"back-off 20s restarting failed container=neutron-httpd pod=neutron-794648c68d-ljskq_openstack(cc86d6d5-ff4f-4d50-9587-b6661e7ac16f)\"" pod="openstack/neutron-794648c68d-ljskq" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f"
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.831075 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" event={"ID":"367a49cf-488a-4852-8728-78dacbfbd500","Type":"ContainerStarted","Data":"d25984e152e6eb632f787bf2b18ebb3a4a22ad06817f65a6d37d26b1554ed376"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.834074 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn" event={"ID":"3e62750c-eec4-43f8-afb4-8f8d8e794247","Type":"ContainerStarted","Data":"4850f5ce5db4f67f2747bd41d8a10c093188f984445e2f8caf8ff670fadc21db"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.839788 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66fcc55b75-8hcl6" event={"ID":"865e0a69-0d85-4d93-9d38-f52449d09d87","Type":"ContainerStarted","Data":"a2bf5e315028c62ae807f755c9bf7e1759223d795629b89a473469c587e7df12"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.859335 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-679d966d56-j58pb" event={"ID":"d3683f1d-a6e8-4762-abad-02773d41261a","Type":"ContainerStarted","Data":"62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.859383 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-679d966d56-j58pb" event={"ID":"d3683f1d-a6e8-4762-abad-02773d41261a","Type":"ContainerStarted","Data":"70c146928da7955eaa2e9607e62ecefd6d836391475af546dfd81f8893d859e8"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.859483 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.859632 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-679d966d56-j58pb"
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.861641 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"13622500-18a7-45ab-9d6f-89b94db99e1c","Type":"ContainerStarted","Data":"1fbcf41eaee83ed3f2630711ce06e02ad812b1fe995bfb058d35fd46eead157a"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.865557 4765 generic.go:334] "Generic (PLEG): container finished" podID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerID="9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4" exitCode=0
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.865628 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a408295-9dcc-4bde-8f4c-019bc7585479","Type":"ContainerDied","Data":"9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.868861 4765 generic.go:334] "Generic (PLEG): container finished" podID="60534b9b-5f03-4d05-91ff-68e6f141ecc3" containerID="22771a83b505f3388a2a2fb8688810df2da9b4656ec5c501d9e6d446c8f06485" exitCode=0
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.868907 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d886847-svlv6" event={"ID":"60534b9b-5f03-4d05-91ff-68e6f141ecc3","Type":"ContainerDied","Data":"22771a83b505f3388a2a2fb8688810df2da9b4656ec5c501d9e6d446c8f06485"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.868945 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d886847-svlv6" event={"ID":"60534b9b-5f03-4d05-91ff-68e6f141ecc3","Type":"ContainerStarted","Data":"d3c4ed82a3e3fae98afee09bb3b99517c08a535f8b39ea75068384407caafd55"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.872852 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f0000725-1e94-4d0d-9891-2771ed36ade8","Type":"ContainerStarted","Data":"899bec10727e39158ea63be1e9e128093a1a44344de9a878e3d0ede89b9aa3d5"}
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.872882 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-df7c996dc-ptfjn"
Dec 10 07:09:37 crc kubenswrapper[4765]: I1210 07:09:37.892812 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-679d966d56-j58pb" podStartSLOduration=2.892792588 podStartE2EDuration="2.892792588s" podCreationTimestamp="2025-12-10 07:09:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:37.882227278 +0000 UTC m=+1297.608892594" watchObservedRunningTime="2025-12-10 07:09:37.892792588 +0000 UTC m=+1297.619457894"
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.025707 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-df7c996dc-ptfjn"]
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.039315 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-df7c996dc-ptfjn"]
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.602893 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c12fa7df-5c04-4a13-82ab-ba30534e618b" path="/var/lib/kubelet/pods/c12fa7df-5c04-4a13-82ab-ba30534e618b/volumes"
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.741708 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.820050 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-dns-svc\") pod \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") "
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.820177 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-sb\") pod \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") "
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.820258 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-nb\") pod \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") "
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.820317 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfv9h\" (UniqueName: \"kubernetes.io/projected/60534b9b-5f03-4d05-91ff-68e6f141ecc3-kube-api-access-cfv9h\") pod \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") "
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.820393 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-config\") pod \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\" (UID: \"60534b9b-5f03-4d05-91ff-68e6f141ecc3\") "
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.829209 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60534b9b-5f03-4d05-91ff-68e6f141ecc3-kube-api-access-cfv9h" (OuterVolumeSpecName: "kube-api-access-cfv9h") pod "60534b9b-5f03-4d05-91ff-68e6f141ecc3" (UID: "60534b9b-5f03-4d05-91ff-68e6f141ecc3"). InnerVolumeSpecName "kube-api-access-cfv9h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.893171 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-794648c68d-ljskq_cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/neutron-httpd/2.log"
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.896400 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "60534b9b-5f03-4d05-91ff-68e6f141ecc3" (UID: "60534b9b-5f03-4d05-91ff-68e6f141ecc3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.898643 4765 generic.go:334] "Generic (PLEG): container finished" podID="3e62750c-eec4-43f8-afb4-8f8d8e794247" containerID="bd38afdebf4473fd3cd8da183475f7625bd16ec0139b00453a5468d1230f11a3" exitCode=0
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.898711 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn" event={"ID":"3e62750c-eec4-43f8-afb4-8f8d8e794247","Type":"ContainerDied","Data":"bd38afdebf4473fd3cd8da183475f7625bd16ec0139b00453a5468d1230f11a3"}
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.908067 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "60534b9b-5f03-4d05-91ff-68e6f141ecc3" (UID: "60534b9b-5f03-4d05-91ff-68e6f141ecc3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.909064 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84d886847-svlv6"
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.909061 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d886847-svlv6" event={"ID":"60534b9b-5f03-4d05-91ff-68e6f141ecc3","Type":"ContainerDied","Data":"d3c4ed82a3e3fae98afee09bb3b99517c08a535f8b39ea75068384407caafd55"}
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.909253 4765 scope.go:117] "RemoveContainer" containerID="22771a83b505f3388a2a2fb8688810df2da9b4656ec5c501d9e6d446c8f06485"
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.911733 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-config" (OuterVolumeSpecName: "config") pod "60534b9b-5f03-4d05-91ff-68e6f141ecc3" (UID: "60534b9b-5f03-4d05-91ff-68e6f141ecc3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.914612 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "60534b9b-5f03-4d05-91ff-68e6f141ecc3" (UID: "60534b9b-5f03-4d05-91ff-68e6f141ecc3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.918010 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-679d966d56-j58pb" event={"ID":"d3683f1d-a6e8-4762-abad-02773d41261a","Type":"ContainerStarted","Data":"fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010"}
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.927327 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-config\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.927417 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.927433 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.927448 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/60534b9b-5f03-4d05-91ff-68e6f141ecc3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:38 crc kubenswrapper[4765]: I1210 07:09:38.927464 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfv9h\" (UniqueName: \"kubernetes.io/projected/60534b9b-5f03-4d05-91ff-68e6f141ecc3-kube-api-access-cfv9h\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:39 crc kubenswrapper[4765]: I1210 07:09:39.229927 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 10 07:09:39 crc kubenswrapper[4765]: I1210 07:09:39.726484 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84d886847-svlv6"]
Dec 10 07:09:39 crc kubenswrapper[4765]: I1210 07:09:39.734797 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84d886847-svlv6"]
Dec 10 07:09:39 crc kubenswrapper[4765]: I1210 07:09:39.940773 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f0000725-1e94-4d0d-9891-2771ed36ade8","Type":"ContainerStarted","Data":"3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70"}
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.631373 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60534b9b-5f03-4d05-91ff-68e6f141ecc3" path="/var/lib/kubelet/pods/60534b9b-5f03-4d05-91ff-68e6f141ecc3/volumes"
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.751497 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.877859 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-log-httpd\") pod \"3a408295-9dcc-4bde-8f4c-019bc7585479\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") "
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.877943 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-scripts\") pod \"3a408295-9dcc-4bde-8f4c-019bc7585479\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") "
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.878025 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-combined-ca-bundle\") pod \"3a408295-9dcc-4bde-8f4c-019bc7585479\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") "
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.878058 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wzt5\" (UniqueName: \"kubernetes.io/projected/3a408295-9dcc-4bde-8f4c-019bc7585479-kube-api-access-4wzt5\") pod \"3a408295-9dcc-4bde-8f4c-019bc7585479\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") "
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.878113 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-run-httpd\") pod \"3a408295-9dcc-4bde-8f4c-019bc7585479\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") "
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.878204 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-sg-core-conf-yaml\") pod \"3a408295-9dcc-4bde-8f4c-019bc7585479\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") "
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.878251 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-config-data\") pod \"3a408295-9dcc-4bde-8f4c-019bc7585479\" (UID: \"3a408295-9dcc-4bde-8f4c-019bc7585479\") "
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.879051 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3a408295-9dcc-4bde-8f4c-019bc7585479" (UID: "3a408295-9dcc-4bde-8f4c-019bc7585479"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.879529 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3a408295-9dcc-4bde-8f4c-019bc7585479" (UID: "3a408295-9dcc-4bde-8f4c-019bc7585479"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.892790 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a408295-9dcc-4bde-8f4c-019bc7585479-kube-api-access-4wzt5" (OuterVolumeSpecName: "kube-api-access-4wzt5") pod "3a408295-9dcc-4bde-8f4c-019bc7585479" (UID: "3a408295-9dcc-4bde-8f4c-019bc7585479"). InnerVolumeSpecName "kube-api-access-4wzt5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.896478 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-scripts" (OuterVolumeSpecName: "scripts") pod "3a408295-9dcc-4bde-8f4c-019bc7585479" (UID: "3a408295-9dcc-4bde-8f4c-019bc7585479"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.961118 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" event={"ID":"367a49cf-488a-4852-8728-78dacbfbd500","Type":"ContainerStarted","Data":"6904c7a18870c99e59c094d428f4fb9aa0a7611afe5301700842c547c5f6ff4a"}
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.966827 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"13622500-18a7-45ab-9d6f-89b94db99e1c","Type":"ContainerStarted","Data":"97ad9d4730036a0561d67b4e64fd9114b22482fdd330ae076073e602aa5189b9"}
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.973397 4765 generic.go:334] "Generic (PLEG): container finished" podID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerID="3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91" exitCode=0
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.973625 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a408295-9dcc-4bde-8f4c-019bc7585479","Type":"ContainerDied","Data":"3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91"}
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.973676 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a408295-9dcc-4bde-8f4c-019bc7585479","Type":"ContainerDied","Data":"8244c6398e56e3fdb56f085cc1c2b30653899e7475ff67fe0d072aab0ff34e6a"}
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.973697 4765 scope.go:117] "RemoveContainer" containerID="3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60"
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.973850 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.980184 4765 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.980215 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.980225 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wzt5\" (UniqueName: \"kubernetes.io/projected/3a408295-9dcc-4bde-8f4c-019bc7585479-kube-api-access-4wzt5\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.980238 4765 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a408295-9dcc-4bde-8f4c-019bc7585479-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.981786 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn" event={"ID":"3e62750c-eec4-43f8-afb4-8f8d8e794247","Type":"ContainerStarted","Data":"fd5e20ca82ef15db6b1e7c06f3847cce05ab0c21f77382df7a18465a87dd8baa"}
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.981849 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:09:40 crc kubenswrapper[4765]: I1210 07:09:40.987879 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66fcc55b75-8hcl6" event={"ID":"865e0a69-0d85-4d93-9d38-f52449d09d87","Type":"ContainerStarted","Data":"2ea6901c79ede6a161d2e30da9d3b8efb9ba7a80dd2d68ffbc3c35ec54f42907"}
Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.010927 4765 scope.go:117] "RemoveContainer" containerID="a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0"
Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.012317 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn" podStartSLOduration=5.012303591 podStartE2EDuration="5.012303591s" podCreationTimestamp="2025-12-10 07:09:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:41.001771551 +0000 UTC m=+1300.728436867" watchObservedRunningTime="2025-12-10 07:09:41.012303591 +0000 UTC m=+1300.738968907"
Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.028198 4765 scope.go:117] "RemoveContainer" containerID="3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91"
Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.035163 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3a408295-9dcc-4bde-8f4c-019bc7585479" (UID: "3a408295-9dcc-4bde-8f4c-019bc7585479"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.067595 4765 scope.go:117] "RemoveContainer" containerID="9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4"
Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.083336 4765 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.098531 4765 scope.go:117] "RemoveContainer" containerID="3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60"
Dec 10 07:09:41 crc kubenswrapper[4765]: E1210 07:09:41.099785 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60\": container with ID starting with 3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60 not found: ID does not exist" containerID="3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60"
Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.099843 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60"} err="failed to get container status \"3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60\": rpc error: code = NotFound desc = could not find container \"3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60\": container with ID starting with 3aebb2006de814ae053878be97758c7cf1c7d0b448e0c6cb907a1636fb213e60 not found: ID does not exist"
Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.099878 4765 scope.go:117] "RemoveContainer" containerID="a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0"
Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.100449 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a408295-9dcc-4bde-8f4c-019bc7585479" (UID: "3a408295-9dcc-4bde-8f4c-019bc7585479"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:41 crc kubenswrapper[4765]: E1210 07:09:41.103350 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0\": container with ID starting with a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0 not found: ID does not exist" containerID="a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.103380 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0"} err="failed to get container status \"a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0\": rpc error: code = NotFound desc = could not find container \"a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0\": container with ID starting with a32f5b56b48c2c1996593a41f8396d78946d83b91bd4f7465cb4be1119bc11a0 not found: ID does not exist" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.103398 4765 scope.go:117] "RemoveContainer" containerID="3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91" Dec 10 07:09:41 crc kubenswrapper[4765]: E1210 07:09:41.103763 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91\": container with ID starting with 3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91 not found: ID does not exist" containerID="3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.103815 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91"} err="failed to get container status \"3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91\": rpc error: code = NotFound desc = could not find container \"3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91\": container with ID starting with 3bd7b57c49d8c40349721b3f40f74a16d2e7f0d15aaef2b718bb938bddc3df91 not found: ID does not exist" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.103850 4765 scope.go:117] "RemoveContainer" containerID="9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4" Dec 10 07:09:41 crc kubenswrapper[4765]: E1210 07:09:41.104250 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4\": container with ID starting with 9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4 not found: ID does not exist" containerID="9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.104295 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4"} err="failed to get container status \"9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4\": rpc error: code = NotFound desc = could not find container \"9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4\": container with ID starting with 9a4040ee2b199024eab0c4287d221a7658920bcd1daca0b3e1d75a6f207395c4 
not found: ID does not exist" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.149880 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-config-data" (OuterVolumeSpecName: "config-data") pod "3a408295-9dcc-4bde-8f4c-019bc7585479" (UID: "3a408295-9dcc-4bde-8f4c-019bc7585479"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.185629 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.185675 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a408295-9dcc-4bde-8f4c-019bc7585479-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.323047 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.337136 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.348381 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:09:41 crc kubenswrapper[4765]: E1210 07:09:41.348871 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="ceilometer-central-agent" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.348892 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="ceilometer-central-agent" Dec 10 07:09:41 crc kubenswrapper[4765]: E1210 07:09:41.348914 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="proxy-httpd" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.348923 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="proxy-httpd" Dec 10 07:09:41 crc kubenswrapper[4765]: E1210 07:09:41.348955 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60534b9b-5f03-4d05-91ff-68e6f141ecc3" containerName="init" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.348963 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="60534b9b-5f03-4d05-91ff-68e6f141ecc3" containerName="init" Dec 10 07:09:41 crc kubenswrapper[4765]: E1210 07:09:41.348976 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="sg-core" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.348983 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="sg-core" Dec 10 07:09:41 crc kubenswrapper[4765]: E1210 07:09:41.348996 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="ceilometer-notification-agent" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.349003 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="ceilometer-notification-agent" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.349261 4765 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="ceilometer-notification-agent" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.349286 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="ceilometer-central-agent" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.349297 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="proxy-httpd" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.349320 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" containerName="sg-core" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.349331 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="60534b9b-5f03-4d05-91ff-68e6f141ecc3" containerName="init" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.351324 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.354197 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.357508 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.394599 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.491129 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.491189 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-config-data\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.491225 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-log-httpd\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.491371 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.491396 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-scripts\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.491454 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-run-httpd\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.491510 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwstk\" (UniqueName: \"kubernetes.io/projected/e26110ee-0dc1-4658-98bf-4e017a4e5aca-kube-api-access-pwstk\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.592899 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.592975 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-scripts\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.593006 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-run-httpd\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.593045 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwstk\" (UniqueName: \"kubernetes.io/projected/e26110ee-0dc1-4658-98bf-4e017a4e5aca-kube-api-access-pwstk\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.593144 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.593173 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-config-data\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.593202 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-log-httpd\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.593821 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-run-httpd\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.594007 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-log-httpd\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.599179 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.599645 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.607871 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-scripts\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.613318 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-config-data\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.617187 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwstk\" (UniqueName: \"kubernetes.io/projected/e26110ee-0dc1-4658-98bf-4e017a4e5aca-kube-api-access-pwstk\") pod \"ceilometer-0\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " pod="openstack/ceilometer-0" Dec 10 07:09:41 crc kubenswrapper[4765]: I1210 07:09:41.679389 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.111128 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" event={"ID":"367a49cf-488a-4852-8728-78dacbfbd500","Type":"ContainerStarted","Data":"db7c3938391cd43e268448d1b8ffd385e79c3741b66633044668f1d83c8facc4"} Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.133463 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"13622500-18a7-45ab-9d6f-89b94db99e1c","Type":"ContainerStarted","Data":"2e056095c38de57f03f4892e50faa4f92ed00c89cfe3f5b322926d3011aae29a"} Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.140426 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" podStartSLOduration=4.008188061 podStartE2EDuration="7.140403023s" podCreationTimestamp="2025-12-10 07:09:35 +0000 UTC" firstStartedPulling="2025-12-10 07:09:37.23334385 +0000 UTC m=+1296.960009166" lastFinishedPulling="2025-12-10 07:09:40.365558812 +0000 UTC m=+1300.092224128" observedRunningTime="2025-12-10 07:09:42.139413675 +0000 UTC m=+1301.866078991" watchObservedRunningTime="2025-12-10 07:09:42.140403023 +0000 UTC m=+1301.867068339" Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.170363 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66fcc55b75-8hcl6" event={"ID":"865e0a69-0d85-4d93-9d38-f52449d09d87","Type":"ContainerStarted","Data":"cb7c75803c1530a9151f1c8f58e20ea86e4933c86c600aeb7834d5e89e66efe6"} Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.181468 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.02755196 podStartE2EDuration="6.181440611s" podCreationTimestamp="2025-12-10 07:09:36 +0000 UTC" firstStartedPulling="2025-12-10 07:09:37.469797872 +0000 UTC m=+1297.196463188" lastFinishedPulling="2025-12-10 07:09:38.623686523 +0000 UTC m=+1298.350351839" observedRunningTime="2025-12-10 07:09:42.167612127 +0000 UTC m=+1301.894277443" watchObservedRunningTime="2025-12-10 07:09:42.181440611 +0000 UTC m=+1301.908105927" Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.191757 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f0000725-1e94-4d0d-9891-2771ed36ade8" containerName="cinder-api-log" containerID="cri-o://3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70" gracePeriod=30 Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.192044 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f0000725-1e94-4d0d-9891-2771ed36ade8","Type":"ContainerStarted","Data":"dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482"} Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.192078 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.192128 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f0000725-1e94-4d0d-9891-2771ed36ade8" containerName="cinder-api" containerID="cri-o://dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482" gracePeriod=30 Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.204043 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/barbican-worker-66fcc55b75-8hcl6" podStartSLOduration=4.094955573 podStartE2EDuration="7.204012633s" podCreationTimestamp="2025-12-10 07:09:35 +0000 UTC" firstStartedPulling="2025-12-10 07:09:36.993640196 +0000 UTC m=+1296.720305512" lastFinishedPulling="2025-12-10 07:09:40.102697256 +0000 UTC m=+1299.829362572" observedRunningTime="2025-12-10 07:09:42.192331611 +0000 UTC m=+1301.918996927" watchObservedRunningTime="2025-12-10 07:09:42.204012633 +0000 UTC m=+1301.930677949" Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.239975 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.243338 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.243305821 podStartE2EDuration="6.243305821s" podCreationTimestamp="2025-12-10 07:09:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:42.211605479 +0000 UTC m=+1301.938270795" watchObservedRunningTime="2025-12-10 07:09:42.243305821 +0000 UTC m=+1301.969971137" Dec 10 07:09:42 crc kubenswrapper[4765]: E1210 07:09:42.476293 4765 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0000725_1e94_4d0d_9891_2771ed36ade8.slice/crio-conmon-3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0000725_1e94_4d0d_9891_2771ed36ade8.slice/crio-3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70.scope\": RecentStats: unable to find data in memory cache]" Dec 10 07:09:42 crc kubenswrapper[4765]: I1210 07:09:42.609374 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a408295-9dcc-4bde-8f4c-019bc7585479" path="/var/lib/kubelet/pods/3a408295-9dcc-4bde-8f4c-019bc7585479/volumes" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.047782 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.146726 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0000725-1e94-4d0d-9891-2771ed36ade8-etc-machine-id\") pod \"f0000725-1e94-4d0d-9891-2771ed36ade8\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.146839 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-combined-ca-bundle\") pod \"f0000725-1e94-4d0d-9891-2771ed36ade8\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.146894 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-scripts\") pod \"f0000725-1e94-4d0d-9891-2771ed36ade8\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.146981 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data\") pod \"f0000725-1e94-4d0d-9891-2771ed36ade8\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.147223 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0000725-1e94-4d0d-9891-2771ed36ade8-logs\") pod \"f0000725-1e94-4d0d-9891-2771ed36ade8\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.147319 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv52b\" (UniqueName: \"kubernetes.io/projected/f0000725-1e94-4d0d-9891-2771ed36ade8-kube-api-access-qv52b\") pod \"f0000725-1e94-4d0d-9891-2771ed36ade8\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.147368 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data-custom\") pod \"f0000725-1e94-4d0d-9891-2771ed36ade8\" (UID: \"f0000725-1e94-4d0d-9891-2771ed36ade8\") " Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.149064 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f0000725-1e94-4d0d-9891-2771ed36ade8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f0000725-1e94-4d0d-9891-2771ed36ade8" (UID: "f0000725-1e94-4d0d-9891-2771ed36ade8"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.152681 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0000725-1e94-4d0d-9891-2771ed36ade8-logs" (OuterVolumeSpecName: "logs") pod "f0000725-1e94-4d0d-9891-2771ed36ade8" (UID: "f0000725-1e94-4d0d-9891-2771ed36ade8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.156160 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-scripts" (OuterVolumeSpecName: "scripts") pod "f0000725-1e94-4d0d-9891-2771ed36ade8" (UID: "f0000725-1e94-4d0d-9891-2771ed36ade8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.156321 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.158699 4765 scope.go:117] "RemoveContainer" containerID="e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.158918 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:43 crc kubenswrapper[4765]: E1210 07:09:43.159001 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"neutron-httpd\" with CrashLoopBackOff: \"back-off 20s restarting failed container=neutron-httpd pod=neutron-794648c68d-ljskq_openstack(cc86d6d5-ff4f-4d50-9587-b6661e7ac16f)\"" pod="openstack/neutron-794648c68d-ljskq" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.160908 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f0000725-1e94-4d0d-9891-2771ed36ade8" (UID: "f0000725-1e94-4d0d-9891-2771ed36ade8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.166343 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0000725-1e94-4d0d-9891-2771ed36ade8-kube-api-access-qv52b" (OuterVolumeSpecName: "kube-api-access-qv52b") pod "f0000725-1e94-4d0d-9891-2771ed36ade8" (UID: "f0000725-1e94-4d0d-9891-2771ed36ade8"). InnerVolumeSpecName "kube-api-access-qv52b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.166531 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-794648c68d-ljskq" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-api" probeResult="failure" output="Get \"http://10.217.0.151:9696/\": dial tcp 10.217.0.151:9696: connect: connection refused" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.198837 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0000725-1e94-4d0d-9891-2771ed36ade8" (UID: "f0000725-1e94-4d0d-9891-2771ed36ade8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.208765 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e26110ee-0dc1-4658-98bf-4e017a4e5aca","Type":"ContainerStarted","Data":"14c39c735aa99b8b06f954400fcc21893b87cf7018fa8c0f8996297d70fa51b1"} Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.208825 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e26110ee-0dc1-4658-98bf-4e017a4e5aca","Type":"ContainerStarted","Data":"456b8d19a5acf1d4d328e1eb8e6ae8dfd3184c5cc7688cbee10b157e708139a9"} Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.212498 4765 generic.go:334] "Generic (PLEG): container finished" podID="f0000725-1e94-4d0d-9891-2771ed36ade8" containerID="dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482" exitCode=0 Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.212526 4765 generic.go:334] "Generic (PLEG): container finished" podID="f0000725-1e94-4d0d-9891-2771ed36ade8" containerID="3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70" exitCode=143 Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.213321 4765 scope.go:117] "RemoveContainer" containerID="e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b" Dec 10 07:09:43 crc kubenswrapper[4765]: E1210 07:09:43.213710 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"neutron-httpd\" with CrashLoopBackOff: \"back-off 20s restarting failed container=neutron-httpd pod=neutron-794648c68d-ljskq_openstack(cc86d6d5-ff4f-4d50-9587-b6661e7ac16f)\"" pod="openstack/neutron-794648c68d-ljskq" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.214152 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.214657 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f0000725-1e94-4d0d-9891-2771ed36ade8","Type":"ContainerDied","Data":"dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482"} Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.214688 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f0000725-1e94-4d0d-9891-2771ed36ade8","Type":"ContainerDied","Data":"3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70"} Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.214701 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f0000725-1e94-4d0d-9891-2771ed36ade8","Type":"ContainerDied","Data":"899bec10727e39158ea63be1e9e128093a1a44344de9a878e3d0ede89b9aa3d5"} Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.214715 4765 scope.go:117] "RemoveContainer" containerID="dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.220352 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data" (OuterVolumeSpecName: "config-data") pod "f0000725-1e94-4d0d-9891-2771ed36ade8" (UID: "f0000725-1e94-4d0d-9891-2771ed36ade8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.252754 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.252787 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0000725-1e94-4d0d-9891-2771ed36ade8-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.252803 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv52b\" (UniqueName: \"kubernetes.io/projected/f0000725-1e94-4d0d-9891-2771ed36ade8-kube-api-access-qv52b\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.252816 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.252827 4765 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f0000725-1e94-4d0d-9891-2771ed36ade8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.252837 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.252847 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0000725-1e94-4d0d-9891-2771ed36ade8-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.326628 4765 scope.go:117] "RemoveContainer" containerID="3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.363281 4765 scope.go:117] "RemoveContainer" containerID="dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482" Dec 10 07:09:43 crc kubenswrapper[4765]: E1210 07:09:43.364617 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482\": container with ID starting with dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482 not found: ID does not exist" containerID="dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.364659 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482"} err="failed to get container status \"dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482\": rpc error: code = NotFound desc = could not find container \"dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482\": container with ID starting with dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482 not found: ID does not exist" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.364679 4765 scope.go:117] "RemoveContainer" containerID="3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70" Dec 10 07:09:43 crc kubenswrapper[4765]: E1210 
07:09:43.365054 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70\": container with ID starting with 3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70 not found: ID does not exist" containerID="3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.365108 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70"} err="failed to get container status \"3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70\": rpc error: code = NotFound desc = could not find container \"3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70\": container with ID starting with 3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70 not found: ID does not exist" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.365127 4765 scope.go:117] "RemoveContainer" containerID="dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.365389 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482"} err="failed to get container status \"dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482\": rpc error: code = NotFound desc = could not find container \"dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482\": container with ID starting with dbe12e8c94f1ff58a185406cee4fd8cc4eae459c3b128e296b829bd86df4a482 not found: ID does not exist" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.365417 4765 scope.go:117] "RemoveContainer" containerID="3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.365717 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70"} err="failed to get container status \"3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70\": rpc error: code = NotFound desc = could not find container \"3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70\": container with ID starting with 3c0b9578910b30eab1fd5d56d283bf1a10d1e7a661b8c4f69cd57a533389eb70 not found: ID does not exist" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.564352 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.584472 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.598111 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 10 07:09:43 crc kubenswrapper[4765]: E1210 07:09:43.598674 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0000725-1e94-4d0d-9891-2771ed36ade8" containerName="cinder-api" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.598697 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0000725-1e94-4d0d-9891-2771ed36ade8" containerName="cinder-api" Dec 10 07:09:43 crc kubenswrapper[4765]: E1210 07:09:43.598749 4765 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f0000725-1e94-4d0d-9891-2771ed36ade8" containerName="cinder-api-log" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.598756 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0000725-1e94-4d0d-9891-2771ed36ade8" containerName="cinder-api-log" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.598945 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0000725-1e94-4d0d-9891-2771ed36ade8" containerName="cinder-api" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.598968 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0000725-1e94-4d0d-9891-2771ed36ade8" containerName="cinder-api-log" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.600778 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.603297 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.603731 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.603906 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.621333 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.687640 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7df456d776-x4hrk"] Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.699297 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.708471 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.708656 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.773697 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data-custom\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.773825 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-internal-tls-certs\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.773848 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw8h5\" (UniqueName: \"kubernetes.io/projected/a99712e9-cab7-452c-9df1-d94b5c4d96af-kube-api-access-sw8h5\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.773877 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.773917 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774054 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a99712e9-cab7-452c-9df1-d94b5c4d96af-logs\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774076 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/202a617e-eb55-4702-8958-3502b6d8e91b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774135 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774153 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774180 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-combined-ca-bundle\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774202 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774224 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data-custom\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774245 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-scripts\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774264 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnjnf\" (UniqueName: \"kubernetes.io/projected/202a617e-eb55-4702-8958-3502b6d8e91b-kube-api-access-hnjnf\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774282 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-public-tls-certs\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.774338 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/202a617e-eb55-4702-8958-3502b6d8e91b-logs\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.795195 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7df456d776-x4hrk"] Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876199 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/202a617e-eb55-4702-8958-3502b6d8e91b-logs\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876600 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data-custom\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876624 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw8h5\" (UniqueName: \"kubernetes.io/projected/a99712e9-cab7-452c-9df1-d94b5c4d96af-kube-api-access-sw8h5\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876645 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-internal-tls-certs\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876664 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876692 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876760 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a99712e9-cab7-452c-9df1-d94b5c4d96af-logs\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876784 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/202a617e-eb55-4702-8958-3502b6d8e91b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876811 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876832 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876852 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-combined-ca-bundle\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876879 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876903 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data-custom\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876922 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-scripts\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876943 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnjnf\" (UniqueName: \"kubernetes.io/projected/202a617e-eb55-4702-8958-3502b6d8e91b-kube-api-access-hnjnf\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 
07:09:43.876965 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-public-tls-certs\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.876938 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/202a617e-eb55-4702-8958-3502b6d8e91b-logs\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.880481 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/202a617e-eb55-4702-8958-3502b6d8e91b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.883550 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a99712e9-cab7-452c-9df1-d94b5c4d96af-logs\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.884020 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.884516 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data-custom\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.888283 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-public-tls-certs\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.895979 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.896113 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-internal-tls-certs\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.899920 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " 
pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.903568 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-scripts\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.903781 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.903984 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data-custom\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.908341 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw8h5\" (UniqueName: \"kubernetes.io/projected/a99712e9-cab7-452c-9df1-d94b5c4d96af-kube-api-access-sw8h5\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.908465 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnjnf\" (UniqueName: \"kubernetes.io/projected/202a617e-eb55-4702-8958-3502b6d8e91b-kube-api-access-hnjnf\") pod \"cinder-api-0\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " pod="openstack/cinder-api-0" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.908996 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-combined-ca-bundle\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:43 crc kubenswrapper[4765]: I1210 07:09:43.909441 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data\") pod \"barbican-api-7df456d776-x4hrk\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:44 crc kubenswrapper[4765]: I1210 07:09:44.039973 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 07:09:44 crc kubenswrapper[4765]: I1210 07:09:44.064892 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:44 crc kubenswrapper[4765]: I1210 07:09:44.303653 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e26110ee-0dc1-4658-98bf-4e017a4e5aca","Type":"ContainerStarted","Data":"abd821d68429dc9e298700ed9af49df7a0f429644f12d2f801124966e6ba2879"} Dec 10 07:09:44 crc kubenswrapper[4765]: W1210 07:09:44.599931 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod202a617e_eb55_4702_8958_3502b6d8e91b.slice/crio-d4429a815a02743ae623f3f63aad725c357eaadffc8a512562311b004ef73799 WatchSource:0}: Error finding container d4429a815a02743ae623f3f63aad725c357eaadffc8a512562311b004ef73799: Status 404 returned error can't find the container with id d4429a815a02743ae623f3f63aad725c357eaadffc8a512562311b004ef73799 Dec 10 07:09:44 crc kubenswrapper[4765]: I1210 07:09:44.611834 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0000725-1e94-4d0d-9891-2771ed36ade8" path="/var/lib/kubelet/pods/f0000725-1e94-4d0d-9891-2771ed36ade8/volumes" Dec 10 07:09:44 crc kubenswrapper[4765]: I1210 07:09:44.612700 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7df456d776-x4hrk"] Dec 10 07:09:44 crc kubenswrapper[4765]: I1210 07:09:44.612725 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 10 07:09:45 crc kubenswrapper[4765]: I1210 07:09:45.347326 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"202a617e-eb55-4702-8958-3502b6d8e91b","Type":"ContainerStarted","Data":"d4429a815a02743ae623f3f63aad725c357eaadffc8a512562311b004ef73799"} Dec 10 07:09:45 crc kubenswrapper[4765]: I1210 07:09:45.352016 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7df456d776-x4hrk" event={"ID":"a99712e9-cab7-452c-9df1-d94b5c4d96af","Type":"ContainerStarted","Data":"80d3af87b8b64fb1621fe28544a27dbb97a6487a3d0bf2b741bfbc88ebf45779"} Dec 10 07:09:45 crc kubenswrapper[4765]: I1210 07:09:45.352067 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7df456d776-x4hrk" event={"ID":"a99712e9-cab7-452c-9df1-d94b5c4d96af","Type":"ContainerStarted","Data":"1a544e35f2564f5d8ea228e06077634a6913a537f1e9cb12b09d75931027d228"} Dec 10 07:09:46 crc kubenswrapper[4765]: I1210 07:09:46.088539 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:09:46 crc kubenswrapper[4765]: I1210 07:09:46.166341 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-794648c68d-ljskq"] Dec 10 07:09:46 crc kubenswrapper[4765]: I1210 07:09:46.166582 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-794648c68d-ljskq" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-api" containerID="cri-o://e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a" gracePeriod=30 Dec 10 07:09:46 crc kubenswrapper[4765]: I1210 07:09:46.368586 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7df456d776-x4hrk" event={"ID":"a99712e9-cab7-452c-9df1-d94b5c4d96af","Type":"ContainerStarted","Data":"2caeb9530028c3e6065c938e2b0efd49e4234ecf49bcff419b6a01776b982ffe"} Dec 10 07:09:46 crc kubenswrapper[4765]: I1210 07:09:46.368745 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:46 crc kubenswrapper[4765]: I1210 07:09:46.376000 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"202a617e-eb55-4702-8958-3502b6d8e91b","Type":"ContainerStarted","Data":"280f956e598734a0c953052f0bef830e41258f83b3ea961721502f3e42f78557"} Dec 10 07:09:46 crc kubenswrapper[4765]: I1210 07:09:46.379952 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e26110ee-0dc1-4658-98bf-4e017a4e5aca","Type":"ContainerStarted","Data":"94fab9d59fc3ebcf2a75efa38c07c7e63723d9b26094698b2a37cae74adf8360"} Dec 10 07:09:46 crc kubenswrapper[4765]: I1210 07:09:46.414406 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7df456d776-x4hrk" podStartSLOduration=3.414383219 podStartE2EDuration="3.414383219s" podCreationTimestamp="2025-12-10 07:09:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:46.400537225 +0000 UTC m=+1306.127202561" watchObservedRunningTime="2025-12-10 07:09:46.414383219 +0000 UTC m=+1306.141048535" Dec 10 07:09:46 crc kubenswrapper[4765]: I1210 07:09:46.742480 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 10 07:09:47 crc kubenswrapper[4765]: I1210 07:09:47.107616 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn" Dec 10 07:09:47 crc kubenswrapper[4765]: I1210 07:09:47.222909 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f9987d8d9-gwj4t"] Dec 10 07:09:47 crc kubenswrapper[4765]: I1210 07:09:47.223311 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" podUID="9909c4a1-0c26-4db9-8f34-3e14ad438864" containerName="dnsmasq-dns" containerID="cri-o://350a7a37130177d4525343650de8cb2208f3925f7e5193aae5d150e8546d7bb4" gracePeriod=10 Dec 10 07:09:47 crc kubenswrapper[4765]: I1210 07:09:47.287547 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 10 07:09:47 crc kubenswrapper[4765]: I1210 07:09:47.397218 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:47 crc kubenswrapper[4765]: I1210 07:09:47.466920 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 07:09:48 crc kubenswrapper[4765]: I1210 07:09:48.019578 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" podUID="9909c4a1-0c26-4db9-8f34-3e14ad438864" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.150:5353: connect: connection refused" Dec 10 07:09:48 crc kubenswrapper[4765]: I1210 07:09:48.418886 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"202a617e-eb55-4702-8958-3502b6d8e91b","Type":"ContainerStarted","Data":"975b9afa0ea86dc2438e36b28f792b56c554b75ab840658b6921015304cc0b22"} Dec 10 07:09:48 crc kubenswrapper[4765]: I1210 07:09:48.430653 4765 generic.go:334] "Generic (PLEG): container finished" podID="9909c4a1-0c26-4db9-8f34-3e14ad438864" containerID="350a7a37130177d4525343650de8cb2208f3925f7e5193aae5d150e8546d7bb4" exitCode=0 Dec 10 07:09:48 crc kubenswrapper[4765]: I1210 07:09:48.430779 4765 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" event={"ID":"9909c4a1-0c26-4db9-8f34-3e14ad438864","Type":"ContainerDied","Data":"350a7a37130177d4525343650de8cb2208f3925f7e5193aae5d150e8546d7bb4"} Dec 10 07:09:48 crc kubenswrapper[4765]: I1210 07:09:48.430993 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="13622500-18a7-45ab-9d6f-89b94db99e1c" containerName="probe" containerID="cri-o://2e056095c38de57f03f4892e50faa4f92ed00c89cfe3f5b322926d3011aae29a" gracePeriod=30 Dec 10 07:09:48 crc kubenswrapper[4765]: I1210 07:09:48.431126 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="13622500-18a7-45ab-9d6f-89b94db99e1c" containerName="cinder-scheduler" containerID="cri-o://97ad9d4730036a0561d67b4e64fd9114b22482fdd330ae076073e602aa5189b9" gracePeriod=30 Dec 10 07:09:48 crc kubenswrapper[4765]: I1210 07:09:48.647788 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-679d966d56-j58pb" Dec 10 07:09:48 crc kubenswrapper[4765]: I1210 07:09:48.659636 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-679d966d56-j58pb" Dec 10 07:09:48 crc kubenswrapper[4765]: I1210 07:09:48.850554 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:09:48 crc kubenswrapper[4765]: I1210 07:09:48.915529 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.014759 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-dns-svc\") pod \"9909c4a1-0c26-4db9-8f34-3e14ad438864\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.014922 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-nb\") pod \"9909c4a1-0c26-4db9-8f34-3e14ad438864\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.014955 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-config\") pod \"9909c4a1-0c26-4db9-8f34-3e14ad438864\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.014985 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-sb\") pod \"9909c4a1-0c26-4db9-8f34-3e14ad438864\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.015016 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzngq\" (UniqueName: \"kubernetes.io/projected/9909c4a1-0c26-4db9-8f34-3e14ad438864-kube-api-access-kzngq\") pod \"9909c4a1-0c26-4db9-8f34-3e14ad438864\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.028635 4765 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/projected/9909c4a1-0c26-4db9-8f34-3e14ad438864-kube-api-access-kzngq" (OuterVolumeSpecName: "kube-api-access-kzngq") pod "9909c4a1-0c26-4db9-8f34-3e14ad438864" (UID: "9909c4a1-0c26-4db9-8f34-3e14ad438864"). InnerVolumeSpecName "kube-api-access-kzngq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.082004 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9909c4a1-0c26-4db9-8f34-3e14ad438864" (UID: "9909c4a1-0c26-4db9-8f34-3e14ad438864"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.112032 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-config" (OuterVolumeSpecName: "config") pod "9909c4a1-0c26-4db9-8f34-3e14ad438864" (UID: "9909c4a1-0c26-4db9-8f34-3e14ad438864"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.119946 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.119991 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.120008 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzngq\" (UniqueName: \"kubernetes.io/projected/9909c4a1-0c26-4db9-8f34-3e14ad438864-kube-api-access-kzngq\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:49 crc kubenswrapper[4765]: E1210 07:09:49.129375 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-nb podName:9909c4a1-0c26-4db9-8f34-3e14ad438864 nodeName:}" failed. No retries permitted until 2025-12-10 07:09:49.629341744 +0000 UTC m=+1309.356007060 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ovsdbserver-nb" (UniqueName: "kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-nb") pod "9909c4a1-0c26-4db9-8f34-3e14ad438864" (UID: "9909c4a1-0c26-4db9-8f34-3e14ad438864") : error deleting /var/lib/kubelet/pods/9909c4a1-0c26-4db9-8f34-3e14ad438864/volume-subpaths: remove /var/lib/kubelet/pods/9909c4a1-0c26-4db9-8f34-3e14ad438864/volume-subpaths: no such file or directory Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.129613 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9909c4a1-0c26-4db9-8f34-3e14ad438864" (UID: "9909c4a1-0c26-4db9-8f34-3e14ad438864"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.221941 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.375789 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-794648c68d-ljskq_cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/neutron-httpd/2.log" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.377940 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.447879 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e26110ee-0dc1-4658-98bf-4e017a4e5aca","Type":"ContainerStarted","Data":"ba5aa72180c965699311bae1545849498ef4ec359f1bd131f125992d095ed1a8"} Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.448224 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.460309 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-794648c68d-ljskq_cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/neutron-httpd/2.log" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.460781 4765 generic.go:334] "Generic (PLEG): container finished" podID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerID="e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a" exitCode=0 Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.460851 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-794648c68d-ljskq" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.460901 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-794648c68d-ljskq" event={"ID":"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f","Type":"ContainerDied","Data":"e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a"} Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.460940 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-794648c68d-ljskq" event={"ID":"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f","Type":"ContainerDied","Data":"06c54fb988dcd93b7882a5423a7449834a98f17281e6700abdda1662c46b06c8"} Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.460980 4765 scope.go:117] "RemoveContainer" containerID="e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.473632 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.473872 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f9987d8d9-gwj4t" event={"ID":"9909c4a1-0c26-4db9-8f34-3e14ad438864","Type":"ContainerDied","Data":"4917e027ab6c544f5c4df83c4be3cef81ad6d1c3fa4c06641c9949cf7cd8de18"} Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.474463 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.477644 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.301641708 podStartE2EDuration="8.477598051s" podCreationTimestamp="2025-12-10 07:09:41 +0000 UTC" firstStartedPulling="2025-12-10 07:09:42.271486252 +0000 UTC m=+1301.998151568" lastFinishedPulling="2025-12-10 07:09:48.447442595 +0000 UTC m=+1308.174107911" observedRunningTime="2025-12-10 07:09:49.475597894 +0000 UTC m=+1309.202263220" watchObservedRunningTime="2025-12-10 07:09:49.477598051 +0000 UTC m=+1309.204263367" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.514722 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.514698287 podStartE2EDuration="6.514698287s" podCreationTimestamp="2025-12-10 07:09:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:09:49.498469515 +0000 UTC m=+1309.225134831" watchObservedRunningTime="2025-12-10 07:09:49.514698287 +0000 UTC m=+1309.241363603" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.528543 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-ovndb-tls-certs\") pod \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.528589 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsmzz\" (UniqueName: \"kubernetes.io/projected/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-kube-api-access-xsmzz\") pod \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.528829 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-httpd-config\") pod \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.528867 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-combined-ca-bundle\") pod \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.528910 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-config\") pod \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\" (UID: \"cc86d6d5-ff4f-4d50-9587-b6661e7ac16f\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.536971 4765 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" (UID: "cc86d6d5-ff4f-4d50-9587-b6661e7ac16f"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.553359 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-kube-api-access-xsmzz" (OuterVolumeSpecName: "kube-api-access-xsmzz") pod "cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" (UID: "cc86d6d5-ff4f-4d50-9587-b6661e7ac16f"). InnerVolumeSpecName "kube-api-access-xsmzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.572032 4765 scope.go:117] "RemoveContainer" containerID="e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.632503 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-nb\") pod \"9909c4a1-0c26-4db9-8f34-3e14ad438864\" (UID: \"9909c4a1-0c26-4db9-8f34-3e14ad438864\") " Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.639971 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9909c4a1-0c26-4db9-8f34-3e14ad438864" (UID: "9909c4a1-0c26-4db9-8f34-3e14ad438864"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.654578 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsmzz\" (UniqueName: \"kubernetes.io/projected/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-kube-api-access-xsmzz\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.654634 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9909c4a1-0c26-4db9-8f34-3e14ad438864-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.654646 4765 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.676980 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" (UID: "cc86d6d5-ff4f-4d50-9587-b6661e7ac16f"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.689269 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-config" (OuterVolumeSpecName: "config") pod "cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" (UID: "cc86d6d5-ff4f-4d50-9587-b6661e7ac16f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.690463 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" (UID: "cc86d6d5-ff4f-4d50-9587-b6661e7ac16f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.746730 4765 scope.go:117] "RemoveContainer" containerID="e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b" Dec 10 07:09:49 crc kubenswrapper[4765]: E1210 07:09:49.747429 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b\": container with ID starting with e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b not found: ID does not exist" containerID="e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.747482 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b"} err="failed to get container status \"e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b\": rpc error: code = NotFound desc = could not find container \"e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b\": container with ID starting with e895e33ce929e1f5ab9d76d0803b6350b42f1b0e2643996a743f8e054820111b not found: ID does not exist" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.747514 4765 scope.go:117] "RemoveContainer" containerID="e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a" Dec 10 07:09:49 crc kubenswrapper[4765]: E1210 07:09:49.747855 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a\": container with ID starting with e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a not found: ID does not exist" containerID="e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.747903 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a"} err="failed to get container status \"e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a\": rpc error: code = NotFound desc = could not find container \"e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a\": container with ID starting with e51c4964654e76eb69f07ecd911e2d3087a3f5c9e17b3f63395cb6fa78a3c82a not found: ID does not exist" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.747922 4765 scope.go:117] "RemoveContainer" containerID="350a7a37130177d4525343650de8cb2208f3925f7e5193aae5d150e8546d7bb4" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.757806 4765 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.757859 4765 reconciler_common.go:293] "Volume detached for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.757871 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.779246 4765 scope.go:117] "RemoveContainer" containerID="8ba9684d3435246fb54936824b9d524ed419d26cebfcd4b10924f5ef83e48f06" Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.816976 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-794648c68d-ljskq"] Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.837885 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-794648c68d-ljskq"] Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.849461 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f9987d8d9-gwj4t"] Dec 10 07:09:49 crc kubenswrapper[4765]: I1210 07:09:49.878698 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f9987d8d9-gwj4t"] Dec 10 07:09:50 crc kubenswrapper[4765]: I1210 07:09:50.484621 4765 generic.go:334] "Generic (PLEG): container finished" podID="13622500-18a7-45ab-9d6f-89b94db99e1c" containerID="2e056095c38de57f03f4892e50faa4f92ed00c89cfe3f5b322926d3011aae29a" exitCode=0 Dec 10 07:09:50 crc kubenswrapper[4765]: I1210 07:09:50.484709 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"13622500-18a7-45ab-9d6f-89b94db99e1c","Type":"ContainerDied","Data":"2e056095c38de57f03f4892e50faa4f92ed00c89cfe3f5b322926d3011aae29a"} Dec 10 07:09:50 crc kubenswrapper[4765]: I1210 07:09:50.603111 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9909c4a1-0c26-4db9-8f34-3e14ad438864" path="/var/lib/kubelet/pods/9909c4a1-0c26-4db9-8f34-3e14ad438864/volumes" Dec 10 07:09:50 crc kubenswrapper[4765]: I1210 07:09:50.603727 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" path="/var/lib/kubelet/pods/cc86d6d5-ff4f-4d50-9587-b6661e7ac16f/volumes" Dec 10 07:09:51 crc kubenswrapper[4765]: I1210 07:09:51.335019 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-586f9fc866-46mgw" Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.515239 4765 generic.go:334] "Generic (PLEG): container finished" podID="13622500-18a7-45ab-9d6f-89b94db99e1c" containerID="97ad9d4730036a0561d67b4e64fd9114b22482fdd330ae076073e602aa5189b9" exitCode=0 Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.515537 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"13622500-18a7-45ab-9d6f-89b94db99e1c","Type":"ContainerDied","Data":"97ad9d4730036a0561d67b4e64fd9114b22482fdd330ae076073e602aa5189b9"} Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.905377 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.915108 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/13622500-18a7-45ab-9d6f-89b94db99e1c-etc-machine-id\") pod \"13622500-18a7-45ab-9d6f-89b94db99e1c\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.915229 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-combined-ca-bundle\") pod \"13622500-18a7-45ab-9d6f-89b94db99e1c\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.915259 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data\") pod \"13622500-18a7-45ab-9d6f-89b94db99e1c\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.915268 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/13622500-18a7-45ab-9d6f-89b94db99e1c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "13622500-18a7-45ab-9d6f-89b94db99e1c" (UID: "13622500-18a7-45ab-9d6f-89b94db99e1c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.916485 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6j6t5\" (UniqueName: \"kubernetes.io/projected/13622500-18a7-45ab-9d6f-89b94db99e1c-kube-api-access-6j6t5\") pod \"13622500-18a7-45ab-9d6f-89b94db99e1c\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.916576 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-scripts\") pod \"13622500-18a7-45ab-9d6f-89b94db99e1c\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.917572 4765 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/13622500-18a7-45ab-9d6f-89b94db99e1c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.926649 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-scripts" (OuterVolumeSpecName: "scripts") pod "13622500-18a7-45ab-9d6f-89b94db99e1c" (UID: "13622500-18a7-45ab-9d6f-89b94db99e1c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:52 crc kubenswrapper[4765]: I1210 07:09:52.939945 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13622500-18a7-45ab-9d6f-89b94db99e1c-kube-api-access-6j6t5" (OuterVolumeSpecName: "kube-api-access-6j6t5") pod "13622500-18a7-45ab-9d6f-89b94db99e1c" (UID: "13622500-18a7-45ab-9d6f-89b94db99e1c"). InnerVolumeSpecName "kube-api-access-6j6t5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.022756 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data-custom\") pod \"13622500-18a7-45ab-9d6f-89b94db99e1c\" (UID: \"13622500-18a7-45ab-9d6f-89b94db99e1c\") " Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.024027 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6j6t5\" (UniqueName: \"kubernetes.io/projected/13622500-18a7-45ab-9d6f-89b94db99e1c-kube-api-access-6j6t5\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.024070 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.030476 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "13622500-18a7-45ab-9d6f-89b94db99e1c" (UID: "13622500-18a7-45ab-9d6f-89b94db99e1c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.115217 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13622500-18a7-45ab-9d6f-89b94db99e1c" (UID: "13622500-18a7-45ab-9d6f-89b94db99e1c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.129132 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.129418 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.146455 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data" (OuterVolumeSpecName: "config-data") pod "13622500-18a7-45ab-9d6f-89b94db99e1c" (UID: "13622500-18a7-45ab-9d6f-89b94db99e1c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.231653 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13622500-18a7-45ab-9d6f-89b94db99e1c-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.532931 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"13622500-18a7-45ab-9d6f-89b94db99e1c","Type":"ContainerDied","Data":"1fbcf41eaee83ed3f2630711ce06e02ad812b1fe995bfb058d35fd46eead157a"} Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.533426 4765 scope.go:117] "RemoveContainer" containerID="2e056095c38de57f03f4892e50faa4f92ed00c89cfe3f5b322926d3011aae29a" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.533146 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.578866 4765 scope.go:117] "RemoveContainer" containerID="97ad9d4730036a0561d67b4e64fd9114b22482fdd330ae076073e602aa5189b9" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.597243 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.605457 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.628574 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 07:09:53 crc kubenswrapper[4765]: E1210 07:09:53.629054 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13622500-18a7-45ab-9d6f-89b94db99e1c" containerName="probe" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629075 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="13622500-18a7-45ab-9d6f-89b94db99e1c" containerName="probe" Dec 10 07:09:53 crc kubenswrapper[4765]: E1210 07:09:53.629102 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-api" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629109 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-api" Dec 10 07:09:53 crc kubenswrapper[4765]: E1210 07:09:53.629120 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-httpd" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629129 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-httpd" Dec 10 07:09:53 crc kubenswrapper[4765]: E1210 07:09:53.629140 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-httpd" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629146 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-httpd" Dec 10 07:09:53 crc kubenswrapper[4765]: E1210 07:09:53.629154 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13622500-18a7-45ab-9d6f-89b94db99e1c" containerName="cinder-scheduler" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629160 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="13622500-18a7-45ab-9d6f-89b94db99e1c" 
containerName="cinder-scheduler" Dec 10 07:09:53 crc kubenswrapper[4765]: E1210 07:09:53.629169 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9909c4a1-0c26-4db9-8f34-3e14ad438864" containerName="init" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629175 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="9909c4a1-0c26-4db9-8f34-3e14ad438864" containerName="init" Dec 10 07:09:53 crc kubenswrapper[4765]: E1210 07:09:53.629190 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9909c4a1-0c26-4db9-8f34-3e14ad438864" containerName="dnsmasq-dns" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629196 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="9909c4a1-0c26-4db9-8f34-3e14ad438864" containerName="dnsmasq-dns" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629392 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="13622500-18a7-45ab-9d6f-89b94db99e1c" containerName="cinder-scheduler" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629408 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="9909c4a1-0c26-4db9-8f34-3e14ad438864" containerName="dnsmasq-dns" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629419 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-api" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629432 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-httpd" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629443 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="13622500-18a7-45ab-9d6f-89b94db99e1c" containerName="probe" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629456 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-httpd" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629462 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-httpd" Dec 10 07:09:53 crc kubenswrapper[4765]: E1210 07:09:53.629626 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-httpd" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.629633 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc86d6d5-ff4f-4d50-9587-b6661e7ac16f" containerName="neutron-httpd" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.630454 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: W1210 07:09:53.633634 4765 reflector.go:561] object-"openstack"/"cinder-scheduler-config-data": failed to list *v1.Secret: secrets "cinder-scheduler-config-data" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 10 07:09:53 crc kubenswrapper[4765]: E1210 07:09:53.633708 4765 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"cinder-scheduler-config-data\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"cinder-scheduler-config-data\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.641667 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-scripts\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.641926 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.642037 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4406d4e6-b2a9-4e81-9672-b54775fad3bb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.642160 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.642234 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gjwm\" (UniqueName: \"kubernetes.io/projected/4406d4e6-b2a9-4e81-9672-b54775fad3bb-kube-api-access-6gjwm\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.642393 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.646693 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.745397 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-scripts\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.745466 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.745534 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4406d4e6-b2a9-4e81-9672-b54775fad3bb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.745629 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4406d4e6-b2a9-4e81-9672-b54775fad3bb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.745714 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.745860 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gjwm\" (UniqueName: \"kubernetes.io/projected/4406d4e6-b2a9-4e81-9672-b54775fad3bb-kube-api-access-6gjwm\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.745990 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.750196 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-scripts\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.752234 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.753724 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0" Dec 10 07:09:53 crc kubenswrapper[4765]: I1210 07:09:53.765405 4765 
Dec 10 07:09:54 crc kubenswrapper[4765]: I1210 07:09:54.511043 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Dec 10 07:09:54 crc kubenswrapper[4765]: I1210 07:09:54.526899 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " pod="openstack/cinder-scheduler-0"
Dec 10 07:09:54 crc kubenswrapper[4765]: I1210 07:09:54.579122 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 10 07:09:54 crc kubenswrapper[4765]: I1210 07:09:54.600049 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13622500-18a7-45ab-9d6f-89b94db99e1c" path="/var/lib/kubelet/pods/13622500-18a7-45ab-9d6f-89b94db99e1c/volumes"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.196393 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.562790 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4406d4e6-b2a9-4e81-9672-b54775fad3bb","Type":"ContainerStarted","Data":"3fddf3e012dcfcd3c7c9523db777474ace50b2f2a34da659dd237c24ded64b53"}
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.663254 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.665307 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.670800 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.670805 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.672898 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-rs8k8"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.696140 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.696209 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config-secret\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.696270 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.696331 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h2kf\" (UniqueName: \"kubernetes.io/projected/3d46af4c-da3b-47cb-a069-cb978f0df610-kube-api-access-7h2kf\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.697363 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.798862 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.799505 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config-secret\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.799645 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient"
Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.799783 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume
\"kube-api-access-7h2kf\" (UniqueName: \"kubernetes.io/projected/3d46af4c-da3b-47cb-a069-cb978f0df610-kube-api-access-7h2kf\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient" Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.800025 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient" Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.808030 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config-secret\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient" Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.820916 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient" Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.821034 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h2kf\" (UniqueName: \"kubernetes.io/projected/3d46af4c-da3b-47cb-a069-cb978f0df610-kube-api-access-7h2kf\") pod \"openstackclient\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " pod="openstack/openstackclient" Dec 10 07:09:55 crc kubenswrapper[4765]: I1210 07:09:55.998748 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 10 07:09:56 crc kubenswrapper[4765]: I1210 07:09:56.585830 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4406d4e6-b2a9-4e81-9672-b54775fad3bb","Type":"ContainerStarted","Data":"cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8"} Dec 10 07:09:56 crc kubenswrapper[4765]: I1210 07:09:56.751561 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:56 crc kubenswrapper[4765]: I1210 07:09:56.788594 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 10 07:09:57 crc kubenswrapper[4765]: I1210 07:09:57.024941 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:09:57 crc kubenswrapper[4765]: I1210 07:09:57.025834 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 10 07:09:57 crc kubenswrapper[4765]: I1210 07:09:57.155399 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-679d966d56-j58pb"] Dec 10 07:09:57 crc kubenswrapper[4765]: I1210 07:09:57.161182 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-679d966d56-j58pb" podUID="d3683f1d-a6e8-4762-abad-02773d41261a" containerName="barbican-api-log" containerID="cri-o://62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0" gracePeriod=30 Dec 10 07:09:57 crc kubenswrapper[4765]: I1210 07:09:57.161464 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-679d966d56-j58pb" podUID="d3683f1d-a6e8-4762-abad-02773d41261a" containerName="barbican-api" containerID="cri-o://fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010" gracePeriod=30 Dec 10 07:09:57 crc kubenswrapper[4765]: I1210 07:09:57.600430 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4406d4e6-b2a9-4e81-9672-b54775fad3bb","Type":"ContainerStarted","Data":"ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5"} Dec 10 07:09:57 crc kubenswrapper[4765]: I1210 07:09:57.610559 4765 generic.go:334] "Generic (PLEG): container finished" podID="d3683f1d-a6e8-4762-abad-02773d41261a" containerID="62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0" exitCode=143 Dec 10 07:09:57 crc kubenswrapper[4765]: I1210 07:09:57.610637 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-679d966d56-j58pb" event={"ID":"d3683f1d-a6e8-4762-abad-02773d41261a","Type":"ContainerDied","Data":"62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0"} Dec 10 07:09:57 crc kubenswrapper[4765]: I1210 07:09:57.615144 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"3d46af4c-da3b-47cb-a069-cb978f0df610","Type":"ContainerStarted","Data":"50db803daa146a594bb6488fddb0a232eb06f28ac4093c527ee7395bc8f1caa6"} Dec 10 07:09:57 crc kubenswrapper[4765]: I1210 07:09:57.647767 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.647741675 podStartE2EDuration="4.647741675s" podCreationTimestamp="2025-12-10 07:09:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 
07:09:57.634119997 +0000 UTC m=+1317.360785303" watchObservedRunningTime="2025-12-10 07:09:57.647741675 +0000 UTC m=+1317.374407001" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.580007 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.721702 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-c976d8757-xmthg"] Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.725377 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.729312 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.730958 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.731121 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.732262 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-c976d8757-xmthg"] Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.743891 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74qx4\" (UniqueName: \"kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-kube-api-access-74qx4\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.743979 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-log-httpd\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.744196 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-etc-swift\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.744224 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-run-httpd\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.744302 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-config-data\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.744357 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-internal-tls-certs\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.744417 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-combined-ca-bundle\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.744489 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-public-tls-certs\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.845179 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-combined-ca-bundle\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.846331 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-public-tls-certs\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.846437 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74qx4\" (UniqueName: \"kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-kube-api-access-74qx4\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.846468 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-log-httpd\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.846761 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-etc-swift\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.846871 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-run-httpd\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.847031 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-config-data\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.847153 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-internal-tls-certs\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.847243 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-log-httpd\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.847708 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-run-httpd\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.851486 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-internal-tls-certs\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.852351 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-config-data\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.852405 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-combined-ca-bundle\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.853226 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-public-tls-certs\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.863634 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-etc-swift\") pod \"swift-proxy-c976d8757-xmthg\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:09:59 crc kubenswrapper[4765]: I1210 07:09:59.867056 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74qx4\" (UniqueName: \"kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-kube-api-access-74qx4\") pod \"swift-proxy-c976d8757-xmthg\" (UID: 
\"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:10:00 crc kubenswrapper[4765]: I1210 07:10:00.058785 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:10:00 crc kubenswrapper[4765]: I1210 07:10:00.692452 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-c976d8757-xmthg"] Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.110260 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-679d966d56-j58pb" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.200213 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data\") pod \"d3683f1d-a6e8-4762-abad-02773d41261a\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.200287 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-combined-ca-bundle\") pod \"d3683f1d-a6e8-4762-abad-02773d41261a\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.201144 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fthk\" (UniqueName: \"kubernetes.io/projected/d3683f1d-a6e8-4762-abad-02773d41261a-kube-api-access-6fthk\") pod \"d3683f1d-a6e8-4762-abad-02773d41261a\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.201256 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3683f1d-a6e8-4762-abad-02773d41261a-logs\") pod \"d3683f1d-a6e8-4762-abad-02773d41261a\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.201327 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data-custom\") pod \"d3683f1d-a6e8-4762-abad-02773d41261a\" (UID: \"d3683f1d-a6e8-4762-abad-02773d41261a\") " Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.202744 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3683f1d-a6e8-4762-abad-02773d41261a-logs" (OuterVolumeSpecName: "logs") pod "d3683f1d-a6e8-4762-abad-02773d41261a" (UID: "d3683f1d-a6e8-4762-abad-02773d41261a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.218310 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3683f1d-a6e8-4762-abad-02773d41261a-kube-api-access-6fthk" (OuterVolumeSpecName: "kube-api-access-6fthk") pod "d3683f1d-a6e8-4762-abad-02773d41261a" (UID: "d3683f1d-a6e8-4762-abad-02773d41261a"). InnerVolumeSpecName "kube-api-access-6fthk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.239279 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d3683f1d-a6e8-4762-abad-02773d41261a" (UID: "d3683f1d-a6e8-4762-abad-02773d41261a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.268276 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3683f1d-a6e8-4762-abad-02773d41261a" (UID: "d3683f1d-a6e8-4762-abad-02773d41261a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.299429 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data" (OuterVolumeSpecName: "config-data") pod "d3683f1d-a6e8-4762-abad-02773d41261a" (UID: "d3683f1d-a6e8-4762-abad-02773d41261a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.303800 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.303831 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.303841 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3683f1d-a6e8-4762-abad-02773d41261a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.303852 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fthk\" (UniqueName: \"kubernetes.io/projected/d3683f1d-a6e8-4762-abad-02773d41261a-kube-api-access-6fthk\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.303863 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3683f1d-a6e8-4762-abad-02773d41261a-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.307835 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.308138 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="ceilometer-central-agent" containerID="cri-o://14c39c735aa99b8b06f954400fcc21893b87cf7018fa8c0f8996297d70fa51b1" gracePeriod=30 Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.308201 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="proxy-httpd" containerID="cri-o://ba5aa72180c965699311bae1545849498ef4ec359f1bd131f125992d095ed1a8" 
gracePeriod=30 Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.308388 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="sg-core" containerID="cri-o://94fab9d59fc3ebcf2a75efa38c07c7e63723d9b26094698b2a37cae74adf8360" gracePeriod=30 Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.308450 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="ceilometer-notification-agent" containerID="cri-o://abd821d68429dc9e298700ed9af49df7a0f429644f12d2f801124966e6ba2879" gracePeriod=30 Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.320720 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.670696 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-c976d8757-xmthg" event={"ID":"cfc6fea8-973e-42c9-9482-a4853abec6c1","Type":"ContainerStarted","Data":"4fc9345636a9c7222a93a22391487b0745bc3710f4331dd26db99b3758d3c34b"} Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.671530 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-c976d8757-xmthg" event={"ID":"cfc6fea8-973e-42c9-9482-a4853abec6c1","Type":"ContainerStarted","Data":"e8fbadda4c283736804707e4d6088058b572b8678b484fe3d73e76e090942c5b"} Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.671557 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-c976d8757-xmthg" event={"ID":"cfc6fea8-973e-42c9-9482-a4853abec6c1","Type":"ContainerStarted","Data":"9535eb635c22cb790bf8517ca738bd19e588c4aeba60d667a7af393f210d9bdb"} Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.671617 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.671644 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.674621 4765 generic.go:334] "Generic (PLEG): container finished" podID="d3683f1d-a6e8-4762-abad-02773d41261a" containerID="fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010" exitCode=0 Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.674678 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-679d966d56-j58pb" event={"ID":"d3683f1d-a6e8-4762-abad-02773d41261a","Type":"ContainerDied","Data":"fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010"} Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.674701 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-679d966d56-j58pb" event={"ID":"d3683f1d-a6e8-4762-abad-02773d41261a","Type":"ContainerDied","Data":"70c146928da7955eaa2e9607e62ecefd6d836391475af546dfd81f8893d859e8"} Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.674760 4765 scope.go:117] "RemoveContainer" containerID="fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.674928 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-679d966d56-j58pb" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.690444 4765 generic.go:334] "Generic (PLEG): container finished" podID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerID="94fab9d59fc3ebcf2a75efa38c07c7e63723d9b26094698b2a37cae74adf8360" exitCode=2 Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.690508 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e26110ee-0dc1-4658-98bf-4e017a4e5aca","Type":"ContainerDied","Data":"94fab9d59fc3ebcf2a75efa38c07c7e63723d9b26094698b2a37cae74adf8360"} Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.712167 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-c976d8757-xmthg" podStartSLOduration=2.712122887 podStartE2EDuration="2.712122887s" podCreationTimestamp="2025-12-10 07:09:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:01.704712066 +0000 UTC m=+1321.431377402" watchObservedRunningTime="2025-12-10 07:10:01.712122887 +0000 UTC m=+1321.438788203" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.725397 4765 scope.go:117] "RemoveContainer" containerID="62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.729787 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-679d966d56-j58pb"] Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.740185 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-679d966d56-j58pb"] Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.751618 4765 scope.go:117] "RemoveContainer" containerID="fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010" Dec 10 07:10:01 crc kubenswrapper[4765]: E1210 07:10:01.752578 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010\": container with ID starting with fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010 not found: ID does not exist" containerID="fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.752616 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010"} err="failed to get container status \"fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010\": rpc error: code = NotFound desc = could not find container \"fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010\": container with ID starting with fa4a19213f360b235a931754d45d7ae8bc502d8558d314f8238e32784ca0d010 not found: ID does not exist" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.752643 4765 scope.go:117] "RemoveContainer" containerID="62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0" Dec 10 07:10:01 crc kubenswrapper[4765]: E1210 07:10:01.753130 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0\": container with ID starting with 62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0 not found: ID does not exist" 
containerID="62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0" Dec 10 07:10:01 crc kubenswrapper[4765]: I1210 07:10:01.753152 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0"} err="failed to get container status \"62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0\": rpc error: code = NotFound desc = could not find container \"62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0\": container with ID starting with 62d00604671a09aa223cc9c4a3bd0d4cf2417abde160a2945c973d7f113448e0 not found: ID does not exist" Dec 10 07:10:02 crc kubenswrapper[4765]: I1210 07:10:02.602330 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3683f1d-a6e8-4762-abad-02773d41261a" path="/var/lib/kubelet/pods/d3683f1d-a6e8-4762-abad-02773d41261a/volumes" Dec 10 07:10:02 crc kubenswrapper[4765]: I1210 07:10:02.708740 4765 generic.go:334] "Generic (PLEG): container finished" podID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerID="ba5aa72180c965699311bae1545849498ef4ec359f1bd131f125992d095ed1a8" exitCode=0 Dec 10 07:10:02 crc kubenswrapper[4765]: I1210 07:10:02.708777 4765 generic.go:334] "Generic (PLEG): container finished" podID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerID="14c39c735aa99b8b06f954400fcc21893b87cf7018fa8c0f8996297d70fa51b1" exitCode=0 Dec 10 07:10:02 crc kubenswrapper[4765]: I1210 07:10:02.708819 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e26110ee-0dc1-4658-98bf-4e017a4e5aca","Type":"ContainerDied","Data":"ba5aa72180c965699311bae1545849498ef4ec359f1bd131f125992d095ed1a8"} Dec 10 07:10:02 crc kubenswrapper[4765]: I1210 07:10:02.708869 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e26110ee-0dc1-4658-98bf-4e017a4e5aca","Type":"ContainerDied","Data":"14c39c735aa99b8b06f954400fcc21893b87cf7018fa8c0f8996297d70fa51b1"} Dec 10 07:10:03 crc kubenswrapper[4765]: I1210 07:10:03.724573 4765 generic.go:334] "Generic (PLEG): container finished" podID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerID="abd821d68429dc9e298700ed9af49df7a0f429644f12d2f801124966e6ba2879" exitCode=0 Dec 10 07:10:03 crc kubenswrapper[4765]: I1210 07:10:03.724637 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e26110ee-0dc1-4658-98bf-4e017a4e5aca","Type":"ContainerDied","Data":"abd821d68429dc9e298700ed9af49df7a0f429644f12d2f801124966e6ba2879"} Dec 10 07:10:04 crc kubenswrapper[4765]: I1210 07:10:04.049361 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:10:04 crc kubenswrapper[4765]: I1210 07:10:04.049444 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:10:04 crc kubenswrapper[4765]: I1210 07:10:04.878761 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 
07:10:08.553524 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.658968 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-log-httpd\") pod \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.659045 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-config-data\") pod \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.659096 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-sg-core-conf-yaml\") pod \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.659201 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-combined-ca-bundle\") pod \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.659248 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-run-httpd\") pod \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.659298 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwstk\" (UniqueName: \"kubernetes.io/projected/e26110ee-0dc1-4658-98bf-4e017a4e5aca-kube-api-access-pwstk\") pod \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.659320 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-scripts\") pod \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\" (UID: \"e26110ee-0dc1-4658-98bf-4e017a4e5aca\") " Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.660746 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e26110ee-0dc1-4658-98bf-4e017a4e5aca" (UID: "e26110ee-0dc1-4658-98bf-4e017a4e5aca"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.660794 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e26110ee-0dc1-4658-98bf-4e017a4e5aca" (UID: "e26110ee-0dc1-4658-98bf-4e017a4e5aca"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.664224 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-scripts" (OuterVolumeSpecName: "scripts") pod "e26110ee-0dc1-4658-98bf-4e017a4e5aca" (UID: "e26110ee-0dc1-4658-98bf-4e017a4e5aca"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.664525 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e26110ee-0dc1-4658-98bf-4e017a4e5aca-kube-api-access-pwstk" (OuterVolumeSpecName: "kube-api-access-pwstk") pod "e26110ee-0dc1-4658-98bf-4e017a4e5aca" (UID: "e26110ee-0dc1-4658-98bf-4e017a4e5aca"). InnerVolumeSpecName "kube-api-access-pwstk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.693659 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e26110ee-0dc1-4658-98bf-4e017a4e5aca" (UID: "e26110ee-0dc1-4658-98bf-4e017a4e5aca"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.744540 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e26110ee-0dc1-4658-98bf-4e017a4e5aca" (UID: "e26110ee-0dc1-4658-98bf-4e017a4e5aca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.760852 4765 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.760910 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.760922 4765 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.760932 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwstk\" (UniqueName: \"kubernetes.io/projected/e26110ee-0dc1-4658-98bf-4e017a4e5aca-kube-api-access-pwstk\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.760943 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.760973 4765 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e26110ee-0dc1-4658-98bf-4e017a4e5aca-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.765707 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-config-data" (OuterVolumeSpecName: "config-data") pod "e26110ee-0dc1-4658-98bf-4e017a4e5aca" (UID: "e26110ee-0dc1-4658-98bf-4e017a4e5aca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.792418 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.792443 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e26110ee-0dc1-4658-98bf-4e017a4e5aca","Type":"ContainerDied","Data":"456b8d19a5acf1d4d328e1eb8e6ae8dfd3184c5cc7688cbee10b157e708139a9"} Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.792514 4765 scope.go:117] "RemoveContainer" containerID="ba5aa72180c965699311bae1545849498ef4ec359f1bd131f125992d095ed1a8" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.794260 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"3d46af4c-da3b-47cb-a069-cb978f0df610","Type":"ContainerStarted","Data":"8faab6e5a335c25d8b666620bbbd06c7e6cd700db4ab561bb39a1a3cf463d922"} Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.812304 4765 scope.go:117] "RemoveContainer" containerID="94fab9d59fc3ebcf2a75efa38c07c7e63723d9b26094698b2a37cae74adf8360" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.815253 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.3074632729999998 podStartE2EDuration="13.815235235s" podCreationTimestamp="2025-12-10 07:09:55 +0000 UTC" firstStartedPulling="2025-12-10 07:09:56.742840762 +0000 UTC m=+1316.469506078" lastFinishedPulling="2025-12-10 07:10:08.250612724 +0000 UTC m=+1327.977278040" observedRunningTime="2025-12-10 07:10:08.809393729 +0000 UTC m=+1328.536059045" watchObservedRunningTime="2025-12-10 07:10:08.815235235 +0000 UTC m=+1328.541900551" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.851656 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.867546 4765 scope.go:117] "RemoveContainer" containerID="abd821d68429dc9e298700ed9af49df7a0f429644f12d2f801124966e6ba2879" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.867804 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e26110ee-0dc1-4658-98bf-4e017a4e5aca-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.869981 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.895265 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:08 crc kubenswrapper[4765]: E1210 07:10:08.895719 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="ceilometer-notification-agent" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.895740 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="ceilometer-notification-agent" Dec 10 07:10:08 crc kubenswrapper[4765]: E1210 07:10:08.895753 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="sg-core" Dec 10 
07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.895760 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="sg-core" Dec 10 07:10:08 crc kubenswrapper[4765]: E1210 07:10:08.895770 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3683f1d-a6e8-4762-abad-02773d41261a" containerName="barbican-api" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.895776 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3683f1d-a6e8-4762-abad-02773d41261a" containerName="barbican-api" Dec 10 07:10:08 crc kubenswrapper[4765]: E1210 07:10:08.895801 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="proxy-httpd" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.895807 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="proxy-httpd" Dec 10 07:10:08 crc kubenswrapper[4765]: E1210 07:10:08.895817 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="ceilometer-central-agent" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.895822 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="ceilometer-central-agent" Dec 10 07:10:08 crc kubenswrapper[4765]: E1210 07:10:08.895833 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3683f1d-a6e8-4762-abad-02773d41261a" containerName="barbican-api-log" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.895839 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3683f1d-a6e8-4762-abad-02773d41261a" containerName="barbican-api-log" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.896026 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="ceilometer-central-agent" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.896042 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3683f1d-a6e8-4762-abad-02773d41261a" containerName="barbican-api" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.896057 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="ceilometer-notification-agent" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.896070 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3683f1d-a6e8-4762-abad-02773d41261a" containerName="barbican-api-log" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.896077 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="sg-core" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.896107 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" containerName="proxy-httpd" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.897929 4765 scope.go:117] "RemoveContainer" containerID="14c39c735aa99b8b06f954400fcc21893b87cf7018fa8c0f8996297d70fa51b1" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.898287 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.908696 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.908836 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.910643 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.970299 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.970386 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-scripts\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.970577 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgkcl\" (UniqueName: \"kubernetes.io/projected/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-kube-api-access-dgkcl\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.970769 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-log-httpd\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.970939 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-run-httpd\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.970993 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-config-data\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0" Dec 10 07:10:08 crc kubenswrapper[4765]: I1210 07:10:08.971110 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0" Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.072541 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0" Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 
07:10:09.072876 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-scripts\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.072914 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgkcl\" (UniqueName: \"kubernetes.io/projected/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-kube-api-access-dgkcl\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.072950 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-log-httpd\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.072996 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-run-httpd\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.073019 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-config-data\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.073046 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.073522 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-log-httpd\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.074258 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-run-httpd\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.078200 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-scripts\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.078420 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.078521 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.078756 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-config-data\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.098449 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgkcl\" (UniqueName: \"kubernetes.io/projected/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-kube-api-access-dgkcl\") pod \"ceilometer-0\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") " pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.236899 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.504324 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.711022 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 07:10:09 crc kubenswrapper[4765]: I1210 07:10:09.803858 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad2872ee-df3d-42a4-aac0-2d41ab8a8507","Type":"ContainerStarted","Data":"b53e170e1a8a3a89a135fb18b5821b354b91e5fa1a429538dceab095c5051812"}
Dec 10 07:10:10 crc kubenswrapper[4765]: I1210 07:10:10.068477 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-c976d8757-xmthg"
Dec 10 07:10:10 crc kubenswrapper[4765]: I1210 07:10:10.069422 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-c976d8757-xmthg"
Dec 10 07:10:10 crc kubenswrapper[4765]: I1210 07:10:10.606698 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e26110ee-0dc1-4658-98bf-4e017a4e5aca" path="/var/lib/kubelet/pods/e26110ee-0dc1-4658-98bf-4e017a4e5aca/volumes"
Dec 10 07:10:11 crc kubenswrapper[4765]: I1210 07:10:11.378142 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 10 07:10:11 crc kubenswrapper[4765]: I1210 07:10:11.379624 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="e773fc35-349d-4256-b0e9-843aaa6dd6c3" containerName="kube-state-metrics" containerID="cri-o://79a5550779e6fbfd015d5f1fa3770043ff09dff1bc1f5ff7329f6ba56fef9c03" gracePeriod=30
Dec 10 07:10:11 crc kubenswrapper[4765]: I1210 07:10:11.833832 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad2872ee-df3d-42a4-aac0-2d41ab8a8507","Type":"ContainerStarted","Data":"7a1a3cbc3b2789631ff329398e6cdf91a13a4bf48c3ff4b3239199f7345881db"}
Dec 10 07:10:11 crc kubenswrapper[4765]: I1210 07:10:11.834192 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad2872ee-df3d-42a4-aac0-2d41ab8a8507","Type":"ContainerStarted","Data":"dfd2c4b357076553f899d799cef143571917022cc975f95c96aaba7fb3303908"}
Dec 10 07:10:11 crc kubenswrapper[4765]: I1210 07:10:11.838127 4765 generic.go:334] "Generic (PLEG): container finished" podID="e773fc35-349d-4256-b0e9-843aaa6dd6c3" containerID="79a5550779e6fbfd015d5f1fa3770043ff09dff1bc1f5ff7329f6ba56fef9c03" exitCode=2
Dec 10 07:10:11 crc kubenswrapper[4765]: I1210 07:10:11.838190 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e773fc35-349d-4256-b0e9-843aaa6dd6c3","Type":"ContainerDied","Data":"79a5550779e6fbfd015d5f1fa3770043ff09dff1bc1f5ff7329f6ba56fef9c03"}
Dec 10 07:10:11 crc kubenswrapper[4765]: I1210 07:10:11.921745 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.040812 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nn92c\" (UniqueName: \"kubernetes.io/projected/e773fc35-349d-4256-b0e9-843aaa6dd6c3-kube-api-access-nn92c\") pod \"e773fc35-349d-4256-b0e9-843aaa6dd6c3\" (UID: \"e773fc35-349d-4256-b0e9-843aaa6dd6c3\") "
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.052280 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e773fc35-349d-4256-b0e9-843aaa6dd6c3-kube-api-access-nn92c" (OuterVolumeSpecName: "kube-api-access-nn92c") pod "e773fc35-349d-4256-b0e9-843aaa6dd6c3" (UID: "e773fc35-349d-4256-b0e9-843aaa6dd6c3"). InnerVolumeSpecName "kube-api-access-nn92c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.145372 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nn92c\" (UniqueName: \"kubernetes.io/projected/e773fc35-349d-4256-b0e9-843aaa6dd6c3-kube-api-access-nn92c\") on node \"crc\" DevicePath \"\""
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.850886 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad2872ee-df3d-42a4-aac0-2d41ab8a8507","Type":"ContainerStarted","Data":"f6bc7e15a3c9cc5e40a9971d975e46511c895b183aba2e6f87f9e3e2cffa824e"}
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.854879 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e773fc35-349d-4256-b0e9-843aaa6dd6c3","Type":"ContainerDied","Data":"09c5a27b8a3ade82dd5948c7c41fda14e9b878ac7519b79f94446e371a8a4955"}
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.855948 4765 scope.go:117] "RemoveContainer" containerID="79a5550779e6fbfd015d5f1fa3770043ff09dff1bc1f5ff7329f6ba56fef9c03"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.855174 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.881773 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.892727 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.906681 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 10 07:10:12 crc kubenswrapper[4765]: E1210 07:10:12.907063 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e773fc35-349d-4256-b0e9-843aaa6dd6c3" containerName="kube-state-metrics"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.907076 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e773fc35-349d-4256-b0e9-843aaa6dd6c3" containerName="kube-state-metrics"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.907303 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e773fc35-349d-4256-b0e9-843aaa6dd6c3" containerName="kube-state-metrics"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.908019 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.911558 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.911563 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.929541 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.961919 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.962213 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.962297 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:12 crc kubenswrapper[4765]: I1210 07:10:12.962374 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k569q\" (UniqueName: \"kubernetes.io/projected/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-api-access-k569q\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.064509 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.064588 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.064633 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k569q\" (UniqueName: \"kubernetes.io/projected/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-api-access-k569q\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.064697 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.070157 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.077691 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.078036 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.093029 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k569q\" (UniqueName: \"kubernetes.io/projected/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-api-access-k569q\") pod \"kube-state-metrics-0\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " pod="openstack/kube-state-metrics-0"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.227421 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.804012 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-vclgh"]
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.805758 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vclgh"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.816988 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-vclgh"]
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.902500 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.906567 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fea25c4-5340-4fab-87cd-22fa2bea0028-operator-scripts\") pod \"nova-api-db-create-vclgh\" (UID: \"7fea25c4-5340-4fab-87cd-22fa2bea0028\") " pod="openstack/nova-api-db-create-vclgh"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.906629 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbng9\" (UniqueName: \"kubernetes.io/projected/7fea25c4-5340-4fab-87cd-22fa2bea0028-kube-api-access-hbng9\") pod \"nova-api-db-create-vclgh\" (UID: \"7fea25c4-5340-4fab-87cd-22fa2bea0028\") " pod="openstack/nova-api-db-create-vclgh"
Dec 10 07:10:13 crc kubenswrapper[4765]: W1210 07:10:13.919798 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6c300c8_c9c7_40c3_8874_236b21eb4856.slice/crio-80c83747377768d2308b28c66acb2bab655ca2d1cd5cfc9a9be2b5c91e048a67 WatchSource:0}: Error finding container 80c83747377768d2308b28c66acb2bab655ca2d1cd5cfc9a9be2b5c91e048a67: Status 404 returned error can't find the container with id 80c83747377768d2308b28c66acb2bab655ca2d1cd5cfc9a9be2b5c91e048a67
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.920102 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-2nwrc"]
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.921845 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-2nwrc"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.937225 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-2nwrc"]
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.965497 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-1a4f-account-create-update-mw9d9"]
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.968287 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1a4f-account-create-update-mw9d9"
Dec 10 07:10:13 crc kubenswrapper[4765]: I1210 07:10:13.970398 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.004271 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1a4f-account-create-update-mw9d9"]
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.008618 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fea25c4-5340-4fab-87cd-22fa2bea0028-operator-scripts\") pod \"nova-api-db-create-vclgh\" (UID: \"7fea25c4-5340-4fab-87cd-22fa2bea0028\") " pod="openstack/nova-api-db-create-vclgh"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.008678 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbng9\" (UniqueName: \"kubernetes.io/projected/7fea25c4-5340-4fab-87cd-22fa2bea0028-kube-api-access-hbng9\") pod \"nova-api-db-create-vclgh\" (UID: \"7fea25c4-5340-4fab-87cd-22fa2bea0028\") " pod="openstack/nova-api-db-create-vclgh"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.008751 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df56d76b-89b4-47f6-a7e6-386d4d49ff43-operator-scripts\") pod \"nova-cell0-db-create-2nwrc\" (UID: \"df56d76b-89b4-47f6-a7e6-386d4d49ff43\") " pod="openstack/nova-cell0-db-create-2nwrc"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.008823 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgzrm\" (UniqueName: \"kubernetes.io/projected/df56d76b-89b4-47f6-a7e6-386d4d49ff43-kube-api-access-sgzrm\") pod \"nova-cell0-db-create-2nwrc\" (UID: \"df56d76b-89b4-47f6-a7e6-386d4d49ff43\") " pod="openstack/nova-cell0-db-create-2nwrc"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.009753 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fea25c4-5340-4fab-87cd-22fa2bea0028-operator-scripts\") pod \"nova-api-db-create-vclgh\" (UID: \"7fea25c4-5340-4fab-87cd-22fa2bea0028\") " pod="openstack/nova-api-db-create-vclgh"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.029649 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-sxcwt"]
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.031153 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-sxcwt"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.031875 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbng9\" (UniqueName: \"kubernetes.io/projected/7fea25c4-5340-4fab-87cd-22fa2bea0028-kube-api-access-hbng9\") pod \"nova-api-db-create-vclgh\" (UID: \"7fea25c4-5340-4fab-87cd-22fa2bea0028\") " pod="openstack/nova-api-db-create-vclgh"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.047338 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-sxcwt"]
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.107981 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-92ce-account-create-update-clqvf"]
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.110737 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgzrm\" (UniqueName: \"kubernetes.io/projected/df56d76b-89b4-47f6-a7e6-386d4d49ff43-kube-api-access-sgzrm\") pod \"nova-cell0-db-create-2nwrc\" (UID: \"df56d76b-89b4-47f6-a7e6-386d4d49ff43\") " pod="openstack/nova-cell0-db-create-2nwrc"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.110817 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-operator-scripts\") pod \"nova-cell1-db-create-sxcwt\" (UID: \"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a\") " pod="openstack/nova-cell1-db-create-sxcwt"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.110847 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9tbf\" (UniqueName: \"kubernetes.io/projected/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-kube-api-access-t9tbf\") pod \"nova-cell1-db-create-sxcwt\" (UID: \"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a\") " pod="openstack/nova-cell1-db-create-sxcwt"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.110891 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhs65\" (UniqueName: \"kubernetes.io/projected/02844920-dce5-4755-a9f7-794a1a95ed34-kube-api-access-bhs65\") pod \"nova-api-1a4f-account-create-update-mw9d9\" (UID: \"02844920-dce5-4755-a9f7-794a1a95ed34\") " pod="openstack/nova-api-1a4f-account-create-update-mw9d9"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.110978 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02844920-dce5-4755-a9f7-794a1a95ed34-operator-scripts\") pod \"nova-api-1a4f-account-create-update-mw9d9\" (UID: \"02844920-dce5-4755-a9f7-794a1a95ed34\") " pod="openstack/nova-api-1a4f-account-create-update-mw9d9"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.111105 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df56d76b-89b4-47f6-a7e6-386d4d49ff43-operator-scripts\") pod \"nova-cell0-db-create-2nwrc\" (UID: \"df56d76b-89b4-47f6-a7e6-386d4d49ff43\") " pod="openstack/nova-cell0-db-create-2nwrc"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.112364 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df56d76b-89b4-47f6-a7e6-386d4d49ff43-operator-scripts\") pod \"nova-cell0-db-create-2nwrc\" (UID: \"df56d76b-89b4-47f6-a7e6-386d4d49ff43\") " pod="openstack/nova-cell0-db-create-2nwrc"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.112481 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-92ce-account-create-update-clqvf"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.114385 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.135777 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgzrm\" (UniqueName: \"kubernetes.io/projected/df56d76b-89b4-47f6-a7e6-386d4d49ff43-kube-api-access-sgzrm\") pod \"nova-cell0-db-create-2nwrc\" (UID: \"df56d76b-89b4-47f6-a7e6-386d4d49ff43\") " pod="openstack/nova-cell0-db-create-2nwrc"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.136276 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vclgh"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.136492 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-92ce-account-create-update-clqvf"]
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.216269 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5kzn\" (UniqueName: \"kubernetes.io/projected/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-kube-api-access-g5kzn\") pod \"nova-cell0-92ce-account-create-update-clqvf\" (UID: \"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27\") " pod="openstack/nova-cell0-92ce-account-create-update-clqvf"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.216461 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-operator-scripts\") pod \"nova-cell1-db-create-sxcwt\" (UID: \"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a\") " pod="openstack/nova-cell1-db-create-sxcwt"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.216493 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9tbf\" (UniqueName: \"kubernetes.io/projected/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-kube-api-access-t9tbf\") pod \"nova-cell1-db-create-sxcwt\" (UID: \"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a\") " pod="openstack/nova-cell1-db-create-sxcwt"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.216541 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhs65\" (UniqueName: \"kubernetes.io/projected/02844920-dce5-4755-a9f7-794a1a95ed34-kube-api-access-bhs65\") pod \"nova-api-1a4f-account-create-update-mw9d9\" (UID: \"02844920-dce5-4755-a9f7-794a1a95ed34\") " pod="openstack/nova-api-1a4f-account-create-update-mw9d9"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.216573 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-operator-scripts\") pod \"nova-cell0-92ce-account-create-update-clqvf\" (UID: \"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27\") " pod="openstack/nova-cell0-92ce-account-create-update-clqvf"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.216634 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02844920-dce5-4755-a9f7-794a1a95ed34-operator-scripts\") pod \"nova-api-1a4f-account-create-update-mw9d9\" (UID: \"02844920-dce5-4755-a9f7-794a1a95ed34\") " pod="openstack/nova-api-1a4f-account-create-update-mw9d9"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.217580 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-operator-scripts\") pod \"nova-cell1-db-create-sxcwt\" (UID: \"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a\") " pod="openstack/nova-cell1-db-create-sxcwt"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.217994 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02844920-dce5-4755-a9f7-794a1a95ed34-operator-scripts\") pod \"nova-api-1a4f-account-create-update-mw9d9\" (UID: \"02844920-dce5-4755-a9f7-794a1a95ed34\") " pod="openstack/nova-api-1a4f-account-create-update-mw9d9"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.237133 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhs65\" (UniqueName: \"kubernetes.io/projected/02844920-dce5-4755-a9f7-794a1a95ed34-kube-api-access-bhs65\") pod \"nova-api-1a4f-account-create-update-mw9d9\" (UID: \"02844920-dce5-4755-a9f7-794a1a95ed34\") " pod="openstack/nova-api-1a4f-account-create-update-mw9d9"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.238382 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9tbf\" (UniqueName: \"kubernetes.io/projected/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-kube-api-access-t9tbf\") pod \"nova-cell1-db-create-sxcwt\" (UID: \"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a\") " pod="openstack/nova-cell1-db-create-sxcwt"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.268787 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-2nwrc"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.307762 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1a4f-account-create-update-mw9d9"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.320201 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5kzn\" (UniqueName: \"kubernetes.io/projected/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-kube-api-access-g5kzn\") pod \"nova-cell0-92ce-account-create-update-clqvf\" (UID: \"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27\") " pod="openstack/nova-cell0-92ce-account-create-update-clqvf"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.320350 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-operator-scripts\") pod \"nova-cell0-92ce-account-create-update-clqvf\" (UID: \"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27\") " pod="openstack/nova-cell0-92ce-account-create-update-clqvf"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.321160 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-operator-scripts\") pod \"nova-cell0-92ce-account-create-update-clqvf\" (UID: \"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27\") " pod="openstack/nova-cell0-92ce-account-create-update-clqvf"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.374197 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5kzn\" (UniqueName: \"kubernetes.io/projected/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-kube-api-access-g5kzn\") pod \"nova-cell0-92ce-account-create-update-clqvf\" (UID: \"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27\") " pod="openstack/nova-cell0-92ce-account-create-update-clqvf"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.407857 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-sxcwt"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.417156 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-4d32-account-create-update-mpglj"]
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.418527 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4d32-account-create-update-mpglj"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.423258 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.430883 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-92ce-account-create-update-clqvf"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.444770 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4d32-account-create-update-mpglj"]
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.525403 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gss5t\" (UniqueName: \"kubernetes.io/projected/2fbf6764-af16-4874-8b04-94c3cafebed7-kube-api-access-gss5t\") pod \"nova-cell1-4d32-account-create-update-mpglj\" (UID: \"2fbf6764-af16-4874-8b04-94c3cafebed7\") " pod="openstack/nova-cell1-4d32-account-create-update-mpglj"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.525521 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fbf6764-af16-4874-8b04-94c3cafebed7-operator-scripts\") pod \"nova-cell1-4d32-account-create-update-mpglj\" (UID: \"2fbf6764-af16-4874-8b04-94c3cafebed7\") " pod="openstack/nova-cell1-4d32-account-create-update-mpglj"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.626722 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fbf6764-af16-4874-8b04-94c3cafebed7-operator-scripts\") pod \"nova-cell1-4d32-account-create-update-mpglj\" (UID: \"2fbf6764-af16-4874-8b04-94c3cafebed7\") " pod="openstack/nova-cell1-4d32-account-create-update-mpglj"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.626839 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gss5t\" (UniqueName: \"kubernetes.io/projected/2fbf6764-af16-4874-8b04-94c3cafebed7-kube-api-access-gss5t\") pod \"nova-cell1-4d32-account-create-update-mpglj\" (UID: \"2fbf6764-af16-4874-8b04-94c3cafebed7\") " pod="openstack/nova-cell1-4d32-account-create-update-mpglj"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.627862 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fbf6764-af16-4874-8b04-94c3cafebed7-operator-scripts\") pod \"nova-cell1-4d32-account-create-update-mpglj\" (UID: \"2fbf6764-af16-4874-8b04-94c3cafebed7\") " pod="openstack/nova-cell1-4d32-account-create-update-mpglj"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.636413 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e773fc35-349d-4256-b0e9-843aaa6dd6c3" path="/var/lib/kubelet/pods/e773fc35-349d-4256-b0e9-843aaa6dd6c3/volumes"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.653876 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gss5t\" (UniqueName: \"kubernetes.io/projected/2fbf6764-af16-4874-8b04-94c3cafebed7-kube-api-access-gss5t\") pod \"nova-cell1-4d32-account-create-update-mpglj\" (UID: \"2fbf6764-af16-4874-8b04-94c3cafebed7\") " pod="openstack/nova-cell1-4d32-account-create-update-mpglj"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.874948 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4d32-account-create-update-mpglj"
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.882242 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e6c300c8-c9c7-40c3-8874-236b21eb4856","Type":"ContainerStarted","Data":"80c83747377768d2308b28c66acb2bab655ca2d1cd5cfc9a9be2b5c91e048a67"}
Dec 10 07:10:14 crc kubenswrapper[4765]: I1210 07:10:14.994906 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-vclgh"]
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.148933 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-2nwrc"]
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.182791 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1a4f-account-create-update-mw9d9"]
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.324128 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-92ce-account-create-update-clqvf"]
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.338738 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-sxcwt"]
Dec 10 07:10:15 crc kubenswrapper[4765]: W1210 07:10:15.637281 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fbf6764_af16_4874_8b04_94c3cafebed7.slice/crio-bf08d25a8c35a3d61a26a4ec0d3d7bbd2b2f609d993e3a258ca62f9ae240421d WatchSource:0}: Error finding container bf08d25a8c35a3d61a26a4ec0d3d7bbd2b2f609d993e3a258ca62f9ae240421d: Status 404 returned error can't find the container with id bf08d25a8c35a3d61a26a4ec0d3d7bbd2b2f609d993e3a258ca62f9ae240421d
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.642111 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4d32-account-create-update-mpglj"]
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.897889 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4d32-account-create-update-mpglj" event={"ID":"2fbf6764-af16-4874-8b04-94c3cafebed7","Type":"ContainerStarted","Data":"20678e247649d49596783de42d6a2c5262eae48ebad74ca06697392a4449445a"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.897938 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4d32-account-create-update-mpglj" event={"ID":"2fbf6764-af16-4874-8b04-94c3cafebed7","Type":"ContainerStarted","Data":"bf08d25a8c35a3d61a26a4ec0d3d7bbd2b2f609d993e3a258ca62f9ae240421d"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.902604 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-sxcwt" event={"ID":"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a","Type":"ContainerStarted","Data":"52eb1492fe3fa232a7cddaeabaf0c0ba85920a2cd5f59a77e514f89786b23fea"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.902754 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-sxcwt" event={"ID":"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a","Type":"ContainerStarted","Data":"fb63903726b0b2537c83076343d16e9a6a83e30978bc32fd57042da00b5c71fb"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.903974 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1a4f-account-create-update-mw9d9" event={"ID":"02844920-dce5-4755-a9f7-794a1a95ed34","Type":"ContainerStarted","Data":"007770fdd95bd2e9cdc72b7e466ab66a2ca46ec8421fd2477c7760e27e8beae3"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.903994 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1a4f-account-create-update-mw9d9" event={"ID":"02844920-dce5-4755-a9f7-794a1a95ed34","Type":"ContainerStarted","Data":"54f6ee556ae0af751cfa3f57bf2e0cb002a0e441ad8d7582dbe62c4ad2873d21"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.910281 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vclgh" event={"ID":"7fea25c4-5340-4fab-87cd-22fa2bea0028","Type":"ContainerStarted","Data":"4e88a8102447c5fe4da6574043b71cbe14ab7c7a10c5423eb7f9dded6b829962"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.910365 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vclgh" event={"ID":"7fea25c4-5340-4fab-87cd-22fa2bea0028","Type":"ContainerStarted","Data":"5b38f80f7849c324dd83c1cdeb08d63062843a859ca066508ee17f9f3b6a773a"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.921549 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-4d32-account-create-update-mpglj" podStartSLOduration=1.921528233 podStartE2EDuration="1.921528233s" podCreationTimestamp="2025-12-10 07:10:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:15.917213901 +0000 UTC m=+1335.643879217" watchObservedRunningTime="2025-12-10 07:10:15.921528233 +0000 UTC m=+1335.648193559"
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.927270 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-2nwrc" event={"ID":"df56d76b-89b4-47f6-a7e6-386d4d49ff43","Type":"ContainerStarted","Data":"79bef78526d2397a18b4954f88042786fd92ad30f41ebac7f4e0b8dc9f125fef"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.928002 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-2nwrc" event={"ID":"df56d76b-89b4-47f6-a7e6-386d4d49ff43","Type":"ContainerStarted","Data":"22567931eb9aeeffc16a4eb24526342096afdd429ae3239e662fb8489913748f"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.936064 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-92ce-account-create-update-clqvf" event={"ID":"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27","Type":"ContainerStarted","Data":"2fa399e313ba09c6c2f05ded479e44437ef840c932e96bb4f5e4fda5b0f14797"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.936125 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-92ce-account-create-update-clqvf" event={"ID":"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27","Type":"ContainerStarted","Data":"601be7cfe3c472f2bc3cb0522ddb0909adc3d1c26bfe5dc290b555a0190cca74"}
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.953267 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-sxcwt" podStartSLOduration=2.953238154 podStartE2EDuration="2.953238154s" podCreationTimestamp="2025-12-10 07:10:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:15.934928824 +0000 UTC m=+1335.661594130" watchObservedRunningTime="2025-12-10 07:10:15.953238154 +0000 UTC m=+1335.679903470"
Dec 10 07:10:15 crc kubenswrapper[4765]: I1210 07:10:15.961310 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-1a4f-account-create-update-mw9d9" podStartSLOduration=2.961286423 podStartE2EDuration="2.961286423s" podCreationTimestamp="2025-12-10 07:10:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:15.955825368 +0000 UTC m=+1335.682490684" watchObservedRunningTime="2025-12-10 07:10:15.961286423 +0000 UTC m=+1335.687951739"
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.005887 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-2nwrc" podStartSLOduration=3.005868052 podStartE2EDuration="3.005868052s" podCreationTimestamp="2025-12-10 07:10:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:15.970001551 +0000 UTC m=+1335.696666867" watchObservedRunningTime="2025-12-10 07:10:16.005868052 +0000 UTC m=+1335.732533368"
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.021373 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-92ce-account-create-update-clqvf" podStartSLOduration=2.021346722 podStartE2EDuration="2.021346722s" podCreationTimestamp="2025-12-10 07:10:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:15.99317032 +0000 UTC m=+1335.719835636" watchObservedRunningTime="2025-12-10 07:10:16.021346722 +0000 UTC m=+1335.748012038"
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.034584 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-vclgh" podStartSLOduration=3.034561818 podStartE2EDuration="3.034561818s" podCreationTimestamp="2025-12-10 07:10:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:16.013706895 +0000 UTC m=+1335.740372211" watchObservedRunningTime="2025-12-10 07:10:16.034561818 +0000 UTC m=+1335.761227134"
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.636335 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.636648 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7cded555-d21d-4dac-9806-a6e1071683c1" containerName="glance-log" containerID="cri-o://7f219892c61ed822df7b7ed0213204ecc23cae587dcf3561a460b12cc40a5a70" gracePeriod=30
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.638492 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7cded555-d21d-4dac-9806-a6e1071683c1" containerName="glance-httpd" containerID="cri-o://76e8fe88229afb1d9ca3ad856a94ccb359d8115b2efae3b4af33f3bfb4e92487" gracePeriod=30
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.947706 4765 generic.go:334] "Generic (PLEG): container finished" podID="df56d76b-89b4-47f6-a7e6-386d4d49ff43" containerID="79bef78526d2397a18b4954f88042786fd92ad30f41ebac7f4e0b8dc9f125fef" exitCode=0
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.948240 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-2nwrc" event={"ID":"df56d76b-89b4-47f6-a7e6-386d4d49ff43","Type":"ContainerDied","Data":"79bef78526d2397a18b4954f88042786fd92ad30f41ebac7f4e0b8dc9f125fef"}
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.950489 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e6c300c8-c9c7-40c3-8874-236b21eb4856","Type":"ContainerStarted","Data":"e98ba8a1e17537f2f52fedb3f0ff188945fc2a52ebce2eae3f075e33b33804bd"}
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.950685 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.952622 4765 generic.go:334] "Generic (PLEG): container finished" podID="2fbf6764-af16-4874-8b04-94c3cafebed7" containerID="20678e247649d49596783de42d6a2c5262eae48ebad74ca06697392a4449445a" exitCode=0
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.952696 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4d32-account-create-update-mpglj" event={"ID":"2fbf6764-af16-4874-8b04-94c3cafebed7","Type":"ContainerDied","Data":"20678e247649d49596783de42d6a2c5262eae48ebad74ca06697392a4449445a"}
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.955360 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad2872ee-df3d-42a4-aac0-2d41ab8a8507","Type":"ContainerStarted","Data":"47d0ce59c285b5f929d32e5447d6b48e2628860d076018eb073e371e9b012beb"}
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.955520 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.955515 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="proxy-httpd" containerID="cri-o://47d0ce59c285b5f929d32e5447d6b48e2628860d076018eb073e371e9b012beb" gracePeriod=30
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.955512 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="ceilometer-central-agent" containerID="cri-o://dfd2c4b357076553f899d799cef143571917022cc975f95c96aaba7fb3303908" gracePeriod=30
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.955556 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="ceilometer-notification-agent" containerID="cri-o://7a1a3cbc3b2789631ff329398e6cdf91a13a4bf48c3ff4b3239199f7345881db" gracePeriod=30
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.955548 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="sg-core" containerID="cri-o://f6bc7e15a3c9cc5e40a9971d975e46511c895b183aba2e6f87f9e3e2cffa824e" gracePeriod=30
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.958183 4765 generic.go:334] "Generic (PLEG): container finished" podID="cdf8425c-b4a5-43ee-9cce-870ae66cdb6a" containerID="52eb1492fe3fa232a7cddaeabaf0c0ba85920a2cd5f59a77e514f89786b23fea" exitCode=0
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.958267 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-sxcwt" event={"ID":"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a","Type":"ContainerDied","Data":"52eb1492fe3fa232a7cddaeabaf0c0ba85920a2cd5f59a77e514f89786b23fea"}
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.963525 4765 generic.go:334] "Generic (PLEG): container finished" podID="02844920-dce5-4755-a9f7-794a1a95ed34" containerID="007770fdd95bd2e9cdc72b7e466ab66a2ca46ec8421fd2477c7760e27e8beae3" exitCode=0
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.963675 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1a4f-account-create-update-mw9d9" event={"ID":"02844920-dce5-4755-a9f7-794a1a95ed34","Type":"ContainerDied","Data":"007770fdd95bd2e9cdc72b7e466ab66a2ca46ec8421fd2477c7760e27e8beae3"}
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.966317 4765 generic.go:334] "Generic (PLEG): container finished" podID="7cded555-d21d-4dac-9806-a6e1071683c1" containerID="7f219892c61ed822df7b7ed0213204ecc23cae587dcf3561a460b12cc40a5a70" exitCode=143
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.966381 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7cded555-d21d-4dac-9806-a6e1071683c1","Type":"ContainerDied","Data":"7f219892c61ed822df7b7ed0213204ecc23cae587dcf3561a460b12cc40a5a70"}
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.977754 4765 generic.go:334] "Generic (PLEG): container finished" podID="7fea25c4-5340-4fab-87cd-22fa2bea0028" containerID="4e88a8102447c5fe4da6574043b71cbe14ab7c7a10c5423eb7f9dded6b829962" exitCode=0
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.977849 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vclgh" event={"ID":"7fea25c4-5340-4fab-87cd-22fa2bea0028","Type":"ContainerDied","Data":"4e88a8102447c5fe4da6574043b71cbe14ab7c7a10c5423eb7f9dded6b829962"}
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.979562 4765 generic.go:334] "Generic (PLEG): container finished" podID="53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27" containerID="2fa399e313ba09c6c2f05ded479e44437ef840c932e96bb4f5e4fda5b0f14797" exitCode=0
Dec 10 07:10:16 crc kubenswrapper[4765]: I1210 07:10:16.979606 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-92ce-account-create-update-clqvf" event={"ID":"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27","Type":"ContainerDied","Data":"2fa399e313ba09c6c2f05ded479e44437ef840c932e96bb4f5e4fda5b0f14797"}
Dec 10 07:10:17 crc kubenswrapper[4765]: I1210 07:10:17.015327 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.038289467 podStartE2EDuration="9.015298579s" podCreationTimestamp="2025-12-10 07:10:08 +0000 UTC" firstStartedPulling="2025-12-10 07:10:09.72828786 +0000 UTC m=+1329.454953176" lastFinishedPulling="2025-12-10 07:10:15.705296972 +0000 UTC m=+1335.431962288" observedRunningTime="2025-12-10 07:10:17.013120277 +0000 UTC m=+1336.739785623" watchObservedRunningTime="2025-12-10 07:10:17.015298579 +0000 UTC m=+1336.741963905"
Dec 10 07:10:17 crc kubenswrapper[4765]: I1210 07:10:17.090675 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.141348968 podStartE2EDuration="5.090651782s" podCreationTimestamp="2025-12-10 07:10:12 +0000 UTC" firstStartedPulling="2025-12-10 07:10:13.942560986 +0000 UTC m=+1333.669226292" lastFinishedPulling="2025-12-10 07:10:15.89186379 +0000 UTC m=+1335.618529106" observedRunningTime="2025-12-10 07:10:17.0614103 +0000 UTC m=+1336.788075626" watchObservedRunningTime="2025-12-10 07:10:17.090651782 +0000 UTC m=+1336.817317098"
Dec 10 07:10:17 crc kubenswrapper[4765]: I1210 07:10:17.778314 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 10 07:10:17 crc kubenswrapper[4765]: I1210 07:10:17.778631 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" containerName="glance-log" containerID="cri-o://c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391" gracePeriod=30
Dec 10 07:10:17 crc kubenswrapper[4765]: I1210 07:10:17.778749 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" containerName="glance-httpd" containerID="cri-o://e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c" gracePeriod=30
Dec 10 07:10:17 crc kubenswrapper[4765]: I1210 07:10:17.996246 4765 generic.go:334] "Generic (PLEG): container finished" podID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" containerID="c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391" exitCode=143
Dec 10 07:10:17 crc kubenswrapper[4765]: I1210 07:10:17.996298 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e13fb850-ffa8-4878-a9c0-4b3e573e8f05","Type":"ContainerDied","Data":"c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391"}
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:17.999968 4765 generic.go:334] "Generic (PLEG): container finished" podID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerID="47d0ce59c285b5f929d32e5447d6b48e2628860d076018eb073e371e9b012beb" exitCode=0
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.000000 4765 generic.go:334] "Generic (PLEG): container finished" podID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerID="f6bc7e15a3c9cc5e40a9971d975e46511c895b183aba2e6f87f9e3e2cffa824e" exitCode=2
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.000010 4765 generic.go:334] "Generic (PLEG): container finished" podID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerID="7a1a3cbc3b2789631ff329398e6cdf91a13a4bf48c3ff4b3239199f7345881db" exitCode=0
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.000019 4765 generic.go:334] "Generic (PLEG): container finished" podID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerID="dfd2c4b357076553f899d799cef143571917022cc975f95c96aaba7fb3303908" exitCode=0
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.000037 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad2872ee-df3d-42a4-aac0-2d41ab8a8507","Type":"ContainerDied","Data":"47d0ce59c285b5f929d32e5447d6b48e2628860d076018eb073e371e9b012beb"}
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.000075 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad2872ee-df3d-42a4-aac0-2d41ab8a8507","Type":"ContainerDied","Data":"f6bc7e15a3c9cc5e40a9971d975e46511c895b183aba2e6f87f9e3e2cffa824e"}
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.000104 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad2872ee-df3d-42a4-aac0-2d41ab8a8507","Type":"ContainerDied","Data":"7a1a3cbc3b2789631ff329398e6cdf91a13a4bf48c3ff4b3239199f7345881db"}
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.000118 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad2872ee-df3d-42a4-aac0-2d41ab8a8507","Type":"ContainerDied","Data":"dfd2c4b357076553f899d799cef143571917022cc975f95c96aaba7fb3303908"}
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.564948 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-92ce-account-create-update-clqvf"
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.632942 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-operator-scripts\") pod \"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27\" (UID: \"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.633063 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5kzn\" (UniqueName: \"kubernetes.io/projected/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-kube-api-access-g5kzn\") pod \"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27\" (UID: \"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.633679 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27" (UID: "53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.638316 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.660985 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-kube-api-access-g5kzn" (OuterVolumeSpecName: "kube-api-access-g5kzn") pod "53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27" (UID: "53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27"). InnerVolumeSpecName "kube-api-access-g5kzn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.746836 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5kzn\" (UniqueName: \"kubernetes.io/projected/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27-kube-api-access-g5kzn\") on node \"crc\" DevicePath \"\""
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.858454 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.865507 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1a4f-account-create-update-mw9d9"
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.876811 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-2nwrc"
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.893163 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vclgh"
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.901234 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-sxcwt"
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.912956 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4d32-account-create-update-mpglj"
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.950278 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fbf6764-af16-4874-8b04-94c3cafebed7-operator-scripts\") pod \"2fbf6764-af16-4874-8b04-94c3cafebed7\" (UID: \"2fbf6764-af16-4874-8b04-94c3cafebed7\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.950351 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgkcl\" (UniqueName: \"kubernetes.io/projected/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-kube-api-access-dgkcl\") pod \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.950381 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gss5t\" (UniqueName: \"kubernetes.io/projected/2fbf6764-af16-4874-8b04-94c3cafebed7-kube-api-access-gss5t\") pod \"2fbf6764-af16-4874-8b04-94c3cafebed7\" (UID: \"2fbf6764-af16-4874-8b04-94c3cafebed7\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.950468 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-run-httpd\") pod \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.950499 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-operator-scripts\") pod \"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a\" (UID: \"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.950522 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhs65\" (UniqueName: \"kubernetes.io/projected/02844920-dce5-4755-a9f7-794a1a95ed34-kube-api-access-bhs65\") pod \"02844920-dce5-4755-a9f7-794a1a95ed34\" (UID: \"02844920-dce5-4755-a9f7-794a1a95ed34\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.950552 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02844920-dce5-4755-a9f7-794a1a95ed34-operator-scripts\") pod \"02844920-dce5-4755-a9f7-794a1a95ed34\" (UID: \"02844920-dce5-4755-a9f7-794a1a95ed34\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.950584 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgzrm\" (UniqueName: \"kubernetes.io/projected/df56d76b-89b4-47f6-a7e6-386d4d49ff43-kube-api-access-sgzrm\") pod \"df56d76b-89b4-47f6-a7e6-386d4d49ff43\" (UID: \"df56d76b-89b4-47f6-a7e6-386d4d49ff43\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.950617 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-scripts\") pod \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.950652 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df56d76b-89b4-47f6-a7e6-386d4d49ff43-operator-scripts\") pod \"df56d76b-89b4-47f6-a7e6-386d4d49ff43\" (UID: \"df56d76b-89b4-47f6-a7e6-386d4d49ff43\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.951003 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9tbf\" (UniqueName: \"kubernetes.io/projected/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-kube-api-access-t9tbf\") pod \"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a\" (UID: \"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.951039 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-sg-core-conf-yaml\") pod \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.951106 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-log-httpd\") pod \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.951151 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fea25c4-5340-4fab-87cd-22fa2bea0028-operator-scripts\") pod \"7fea25c4-5340-4fab-87cd-22fa2bea0028\" (UID: \"7fea25c4-5340-4fab-87cd-22fa2bea0028\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.951177 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-config-data\") pod \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.951217 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbng9\" (UniqueName: \"kubernetes.io/projected/7fea25c4-5340-4fab-87cd-22fa2bea0028-kube-api-access-hbng9\") pod \"7fea25c4-5340-4fab-87cd-22fa2bea0028\" (UID: \"7fea25c4-5340-4fab-87cd-22fa2bea0028\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.951257 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-combined-ca-bundle\") pod \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\" (UID: \"ad2872ee-df3d-42a4-aac0-2d41ab8a8507\") "
Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.954252 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ad2872ee-df3d-42a4-aac0-2d41ab8a8507" (UID: "ad2872ee-df3d-42a4-aac0-2d41ab8a8507"). InnerVolumeSpecName "run-httpd".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.954701 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df56d76b-89b4-47f6-a7e6-386d4d49ff43-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "df56d76b-89b4-47f6-a7e6-386d4d49ff43" (UID: "df56d76b-89b4-47f6-a7e6-386d4d49ff43"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.954861 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02844920-dce5-4755-a9f7-794a1a95ed34-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "02844920-dce5-4755-a9f7-794a1a95ed34" (UID: "02844920-dce5-4755-a9f7-794a1a95ed34"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.955457 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fea25c4-5340-4fab-87cd-22fa2bea0028-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7fea25c4-5340-4fab-87cd-22fa2bea0028" (UID: "7fea25c4-5340-4fab-87cd-22fa2bea0028"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.956067 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ad2872ee-df3d-42a4-aac0-2d41ab8a8507" (UID: "ad2872ee-df3d-42a4-aac0-2d41ab8a8507"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.957809 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cdf8425c-b4a5-43ee-9cce-870ae66cdb6a" (UID: "cdf8425c-b4a5-43ee-9cce-870ae66cdb6a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.959031 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fbf6764-af16-4874-8b04-94c3cafebed7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2fbf6764-af16-4874-8b04-94c3cafebed7" (UID: "2fbf6764-af16-4874-8b04-94c3cafebed7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.964495 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fbf6764-af16-4874-8b04-94c3cafebed7-kube-api-access-gss5t" (OuterVolumeSpecName: "kube-api-access-gss5t") pod "2fbf6764-af16-4874-8b04-94c3cafebed7" (UID: "2fbf6764-af16-4874-8b04-94c3cafebed7"). InnerVolumeSpecName "kube-api-access-gss5t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.966228 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-kube-api-access-t9tbf" (OuterVolumeSpecName: "kube-api-access-t9tbf") pod "cdf8425c-b4a5-43ee-9cce-870ae66cdb6a" (UID: "cdf8425c-b4a5-43ee-9cce-870ae66cdb6a"). 
InnerVolumeSpecName "kube-api-access-t9tbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.971003 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df56d76b-89b4-47f6-a7e6-386d4d49ff43-kube-api-access-sgzrm" (OuterVolumeSpecName: "kube-api-access-sgzrm") pod "df56d76b-89b4-47f6-a7e6-386d4d49ff43" (UID: "df56d76b-89b4-47f6-a7e6-386d4d49ff43"). InnerVolumeSpecName "kube-api-access-sgzrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.975235 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-scripts" (OuterVolumeSpecName: "scripts") pod "ad2872ee-df3d-42a4-aac0-2d41ab8a8507" (UID: "ad2872ee-df3d-42a4-aac0-2d41ab8a8507"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.975672 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02844920-dce5-4755-a9f7-794a1a95ed34-kube-api-access-bhs65" (OuterVolumeSpecName: "kube-api-access-bhs65") pod "02844920-dce5-4755-a9f7-794a1a95ed34" (UID: "02844920-dce5-4755-a9f7-794a1a95ed34"). InnerVolumeSpecName "kube-api-access-bhs65". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:18 crc kubenswrapper[4765]: I1210 07:10:18.980292 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fea25c4-5340-4fab-87cd-22fa2bea0028-kube-api-access-hbng9" (OuterVolumeSpecName: "kube-api-access-hbng9") pod "7fea25c4-5340-4fab-87cd-22fa2bea0028" (UID: "7fea25c4-5340-4fab-87cd-22fa2bea0028"). InnerVolumeSpecName "kube-api-access-hbng9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:18.998374 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-kube-api-access-dgkcl" (OuterVolumeSpecName: "kube-api-access-dgkcl") pod "ad2872ee-df3d-42a4-aac0-2d41ab8a8507" (UID: "ad2872ee-df3d-42a4-aac0-2d41ab8a8507"). InnerVolumeSpecName "kube-api-access-dgkcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.019222 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-92ce-account-create-update-clqvf" event={"ID":"53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27","Type":"ContainerDied","Data":"601be7cfe3c472f2bc3cb0522ddb0909adc3d1c26bfe5dc290b555a0190cca74"} Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.019253 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-92ce-account-create-update-clqvf" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.019262 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="601be7cfe3c472f2bc3cb0522ddb0909adc3d1c26bfe5dc290b555a0190cca74" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.021892 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4d32-account-create-update-mpglj" event={"ID":"2fbf6764-af16-4874-8b04-94c3cafebed7","Type":"ContainerDied","Data":"bf08d25a8c35a3d61a26a4ec0d3d7bbd2b2f609d993e3a258ca62f9ae240421d"} Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.021913 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4d32-account-create-update-mpglj" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.021917 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf08d25a8c35a3d61a26a4ec0d3d7bbd2b2f609d993e3a258ca62f9ae240421d" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.022491 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ad2872ee-df3d-42a4-aac0-2d41ab8a8507" (UID: "ad2872ee-df3d-42a4-aac0-2d41ab8a8507"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.029412 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad2872ee-df3d-42a4-aac0-2d41ab8a8507","Type":"ContainerDied","Data":"b53e170e1a8a3a89a135fb18b5821b354b91e5fa1a429538dceab095c5051812"} Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.029536 4765 scope.go:117] "RemoveContainer" containerID="47d0ce59c285b5f929d32e5447d6b48e2628860d076018eb073e371e9b012beb" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.029709 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.036409 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-sxcwt" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.036454 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-sxcwt" event={"ID":"cdf8425c-b4a5-43ee-9cce-870ae66cdb6a","Type":"ContainerDied","Data":"fb63903726b0b2537c83076343d16e9a6a83e30978bc32fd57042da00b5c71fb"} Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.036502 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb63903726b0b2537c83076343d16e9a6a83e30978bc32fd57042da00b5c71fb" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.040292 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1a4f-account-create-update-mw9d9" event={"ID":"02844920-dce5-4755-a9f7-794a1a95ed34","Type":"ContainerDied","Data":"54f6ee556ae0af751cfa3f57bf2e0cb002a0e441ad8d7582dbe62c4ad2873d21"} Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.040349 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54f6ee556ae0af751cfa3f57bf2e0cb002a0e441ad8d7582dbe62c4ad2873d21" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.040465 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-1a4f-account-create-update-mw9d9" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.050587 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vclgh" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.051386 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vclgh" event={"ID":"7fea25c4-5340-4fab-87cd-22fa2bea0028","Type":"ContainerDied","Data":"5b38f80f7849c324dd83c1cdeb08d63062843a859ca066508ee17f9f3b6a773a"} Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.051450 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b38f80f7849c324dd83c1cdeb08d63062843a859ca066508ee17f9f3b6a773a" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055179 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-2nwrc" event={"ID":"df56d76b-89b4-47f6-a7e6-386d4d49ff43","Type":"ContainerDied","Data":"22567931eb9aeeffc16a4eb24526342096afdd429ae3239e662fb8489913748f"} Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055216 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22567931eb9aeeffc16a4eb24526342096afdd429ae3239e662fb8489913748f" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055251 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df56d76b-89b4-47f6-a7e6-386d4d49ff43-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055270 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-2nwrc" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055279 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9tbf\" (UniqueName: \"kubernetes.io/projected/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-kube-api-access-t9tbf\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055294 4765 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055308 4765 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055320 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fea25c4-5340-4fab-87cd-22fa2bea0028-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055332 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbng9\" (UniqueName: \"kubernetes.io/projected/7fea25c4-5340-4fab-87cd-22fa2bea0028-kube-api-access-hbng9\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055344 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fbf6764-af16-4874-8b04-94c3cafebed7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055746 4765 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-dgkcl\" (UniqueName: \"kubernetes.io/projected/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-kube-api-access-dgkcl\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.055998 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gss5t\" (UniqueName: \"kubernetes.io/projected/2fbf6764-af16-4874-8b04-94c3cafebed7-kube-api-access-gss5t\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.056029 4765 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.056042 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.056055 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhs65\" (UniqueName: \"kubernetes.io/projected/02844920-dce5-4755-a9f7-794a1a95ed34-kube-api-access-bhs65\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.056068 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02844920-dce5-4755-a9f7-794a1a95ed34-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.056145 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgzrm\" (UniqueName: \"kubernetes.io/projected/df56d76b-89b4-47f6-a7e6-386d4d49ff43-kube-api-access-sgzrm\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.056161 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.072950 4765 scope.go:117] "RemoveContainer" containerID="f6bc7e15a3c9cc5e40a9971d975e46511c895b183aba2e6f87f9e3e2cffa824e" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.091968 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad2872ee-df3d-42a4-aac0-2d41ab8a8507" (UID: "ad2872ee-df3d-42a4-aac0-2d41ab8a8507"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.109321 4765 scope.go:117] "RemoveContainer" containerID="7a1a3cbc3b2789631ff329398e6cdf91a13a4bf48c3ff4b3239199f7345881db" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.138938 4765 scope.go:117] "RemoveContainer" containerID="dfd2c4b357076553f899d799cef143571917022cc975f95c96aaba7fb3303908" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.157950 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.178214 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-config-data" (OuterVolumeSpecName: "config-data") pod "ad2872ee-df3d-42a4-aac0-2d41ab8a8507" (UID: "ad2872ee-df3d-42a4-aac0-2d41ab8a8507"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.259945 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad2872ee-df3d-42a4-aac0-2d41ab8a8507-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.364843 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.375534 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.405600 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:19 crc kubenswrapper[4765]: E1210 07:10:19.406119 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fbf6764-af16-4874-8b04-94c3cafebed7" containerName="mariadb-account-create-update" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406135 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fbf6764-af16-4874-8b04-94c3cafebed7" containerName="mariadb-account-create-update" Dec 10 07:10:19 crc kubenswrapper[4765]: E1210 07:10:19.406153 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdf8425c-b4a5-43ee-9cce-870ae66cdb6a" containerName="mariadb-database-create" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406160 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdf8425c-b4a5-43ee-9cce-870ae66cdb6a" containerName="mariadb-database-create" Dec 10 07:10:19 crc kubenswrapper[4765]: E1210 07:10:19.406172 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27" containerName="mariadb-account-create-update" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406181 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27" containerName="mariadb-account-create-update" Dec 10 07:10:19 crc kubenswrapper[4765]: E1210 07:10:19.406204 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="sg-core" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406211 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="sg-core" Dec 10 07:10:19 crc kubenswrapper[4765]: E1210 07:10:19.406236 4765 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="02844920-dce5-4755-a9f7-794a1a95ed34" containerName="mariadb-account-create-update" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406244 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="02844920-dce5-4755-a9f7-794a1a95ed34" containerName="mariadb-account-create-update" Dec 10 07:10:19 crc kubenswrapper[4765]: E1210 07:10:19.406265 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="ceilometer-notification-agent" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406274 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="ceilometer-notification-agent" Dec 10 07:10:19 crc kubenswrapper[4765]: E1210 07:10:19.406290 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="ceilometer-central-agent" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406299 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="ceilometer-central-agent" Dec 10 07:10:19 crc kubenswrapper[4765]: E1210 07:10:19.406312 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fea25c4-5340-4fab-87cd-22fa2bea0028" containerName="mariadb-database-create" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406320 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fea25c4-5340-4fab-87cd-22fa2bea0028" containerName="mariadb-database-create" Dec 10 07:10:19 crc kubenswrapper[4765]: E1210 07:10:19.406336 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df56d76b-89b4-47f6-a7e6-386d4d49ff43" containerName="mariadb-database-create" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406345 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="df56d76b-89b4-47f6-a7e6-386d4d49ff43" containerName="mariadb-database-create" Dec 10 07:10:19 crc kubenswrapper[4765]: E1210 07:10:19.406355 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="proxy-httpd" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406362 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="proxy-httpd" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406582 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="ceilometer-central-agent" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406605 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdf8425c-b4a5-43ee-9cce-870ae66cdb6a" containerName="mariadb-database-create" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406620 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fbf6764-af16-4874-8b04-94c3cafebed7" containerName="mariadb-account-create-update" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406633 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="ceilometer-notification-agent" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406647 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fea25c4-5340-4fab-87cd-22fa2bea0028" containerName="mariadb-database-create" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406664 4765 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="proxy-httpd" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406675 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="df56d76b-89b4-47f6-a7e6-386d4d49ff43" containerName="mariadb-database-create" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406686 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="02844920-dce5-4755-a9f7-794a1a95ed34" containerName="mariadb-account-create-update" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406704 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27" containerName="mariadb-account-create-update" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.406717 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" containerName="sg-core" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.408755 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.411895 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.412125 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.412277 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.423181 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.464949 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-scripts\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.465974 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.466112 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q447s\" (UniqueName: \"kubernetes.io/projected/02840a32-864e-4d6a-9deb-fffc5397653b-kube-api-access-q447s\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.466314 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.466440 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-log-httpd\") pod 
\"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.466673 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-run-httpd\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.466806 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-config-data\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.466951 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.569314 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-scripts\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.569478 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.569534 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q447s\" (UniqueName: \"kubernetes.io/projected/02840a32-864e-4d6a-9deb-fffc5397653b-kube-api-access-q447s\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.569558 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.569587 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-log-httpd\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.569633 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-run-httpd\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.569666 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-config-data\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.569695 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.570923 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-run-httpd\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.571421 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-log-httpd\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.580248 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.580373 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-scripts\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.580503 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.580624 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-config-data\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.583921 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.594350 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q447s\" (UniqueName: \"kubernetes.io/projected/02840a32-864e-4d6a-9deb-fffc5397653b-kube-api-access-q447s\") pod \"ceilometer-0\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " pod="openstack/ceilometer-0" Dec 10 07:10:19 crc kubenswrapper[4765]: I1210 07:10:19.803773 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.093914 4765 generic.go:334] "Generic (PLEG): container finished" podID="7cded555-d21d-4dac-9806-a6e1071683c1" containerID="76e8fe88229afb1d9ca3ad856a94ccb359d8115b2efae3b4af33f3bfb4e92487" exitCode=0 Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.094214 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7cded555-d21d-4dac-9806-a6e1071683c1","Type":"ContainerDied","Data":"76e8fe88229afb1d9ca3ad856a94ccb359d8115b2efae3b4af33f3bfb4e92487"} Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.382918 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:20 crc kubenswrapper[4765]: W1210 07:10:20.395029 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02840a32_864e_4d6a_9deb_fffc5397653b.slice/crio-64e2e065e7ad7b8630559cde076b64494854ebe7197042671ca73b5dbb248b91 WatchSource:0}: Error finding container 64e2e065e7ad7b8630559cde076b64494854ebe7197042671ca73b5dbb248b91: Status 404 returned error can't find the container with id 64e2e065e7ad7b8630559cde076b64494854ebe7197042671ca73b5dbb248b91 Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.551992 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.600834 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-logs\") pod \"7cded555-d21d-4dac-9806-a6e1071683c1\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.600918 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-scripts\") pod \"7cded555-d21d-4dac-9806-a6e1071683c1\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.600980 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-httpd-run\") pod \"7cded555-d21d-4dac-9806-a6e1071683c1\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.601056 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"7cded555-d21d-4dac-9806-a6e1071683c1\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.601116 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-public-tls-certs\") pod \"7cded555-d21d-4dac-9806-a6e1071683c1\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.601262 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-config-data\") pod \"7cded555-d21d-4dac-9806-a6e1071683c1\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " Dec 10 07:10:20 crc 
kubenswrapper[4765]: I1210 07:10:20.601386 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p75rd\" (UniqueName: \"kubernetes.io/projected/7cded555-d21d-4dac-9806-a6e1071683c1-kube-api-access-p75rd\") pod \"7cded555-d21d-4dac-9806-a6e1071683c1\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.601427 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-combined-ca-bundle\") pod \"7cded555-d21d-4dac-9806-a6e1071683c1\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.609181 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7cded555-d21d-4dac-9806-a6e1071683c1" (UID: "7cded555-d21d-4dac-9806-a6e1071683c1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.609409 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-logs" (OuterVolumeSpecName: "logs") pod "7cded555-d21d-4dac-9806-a6e1071683c1" (UID: "7cded555-d21d-4dac-9806-a6e1071683c1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.617891 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cded555-d21d-4dac-9806-a6e1071683c1-kube-api-access-p75rd" (OuterVolumeSpecName: "kube-api-access-p75rd") pod "7cded555-d21d-4dac-9806-a6e1071683c1" (UID: "7cded555-d21d-4dac-9806-a6e1071683c1"). InnerVolumeSpecName "kube-api-access-p75rd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.618462 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad2872ee-df3d-42a4-aac0-2d41ab8a8507" path="/var/lib/kubelet/pods/ad2872ee-df3d-42a4-aac0-2d41ab8a8507/volumes" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.622281 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-scripts" (OuterVolumeSpecName: "scripts") pod "7cded555-d21d-4dac-9806-a6e1071683c1" (UID: "7cded555-d21d-4dac-9806-a6e1071683c1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.639971 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "7cded555-d21d-4dac-9806-a6e1071683c1" (UID: "7cded555-d21d-4dac-9806-a6e1071683c1"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.685695 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7cded555-d21d-4dac-9806-a6e1071683c1" (UID: "7cded555-d21d-4dac-9806-a6e1071683c1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.704290 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.704327 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p75rd\" (UniqueName: \"kubernetes.io/projected/7cded555-d21d-4dac-9806-a6e1071683c1-kube-api-access-p75rd\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.704339 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.704349 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.704359 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.704369 4765 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7cded555-d21d-4dac-9806-a6e1071683c1-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:20 crc kubenswrapper[4765]: E1210 07:10:20.743503 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-public-tls-certs podName:7cded555-d21d-4dac-9806-a6e1071683c1 nodeName:}" failed. No retries permitted until 2025-12-10 07:10:21.243467387 +0000 UTC m=+1340.970132703 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-public-tls-certs") pod "7cded555-d21d-4dac-9806-a6e1071683c1" (UID: "7cded555-d21d-4dac-9806-a6e1071683c1") : error deleting /var/lib/kubelet/pods/7cded555-d21d-4dac-9806-a6e1071683c1/volume-subpaths: remove /var/lib/kubelet/pods/7cded555-d21d-4dac-9806-a6e1071683c1/volume-subpaths: no such file or directory Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.748033 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-config-data" (OuterVolumeSpecName: "config-data") pod "7cded555-d21d-4dac-9806-a6e1071683c1" (UID: "7cded555-d21d-4dac-9806-a6e1071683c1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.753446 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.805820 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:20 crc kubenswrapper[4765]: I1210 07:10:20.806125 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.187656 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7cded555-d21d-4dac-9806-a6e1071683c1","Type":"ContainerDied","Data":"55aa5f9ad7167502e70c7524a9ca0986bc53678ca59f2f3da255499cc7273986"} Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.187794 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.187971 4765 scope.go:117] "RemoveContainer" containerID="76e8fe88229afb1d9ca3ad856a94ccb359d8115b2efae3b4af33f3bfb4e92487" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.200235 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02840a32-864e-4d6a-9deb-fffc5397653b","Type":"ContainerStarted","Data":"64e2e065e7ad7b8630559cde076b64494854ebe7197042671ca73b5dbb248b91"} Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.254269 4765 scope.go:117] "RemoveContainer" containerID="7f219892c61ed822df7b7ed0213204ecc23cae587dcf3561a460b12cc40a5a70" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.325049 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-public-tls-certs\") pod \"7cded555-d21d-4dac-9806-a6e1071683c1\" (UID: \"7cded555-d21d-4dac-9806-a6e1071683c1\") " Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.329541 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7cded555-d21d-4dac-9806-a6e1071683c1" (UID: "7cded555-d21d-4dac-9806-a6e1071683c1"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.427749 4765 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cded555-d21d-4dac-9806-a6e1071683c1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.592136 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.617390 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.631243 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:10:21 crc kubenswrapper[4765]: E1210 07:10:21.631700 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cded555-d21d-4dac-9806-a6e1071683c1" containerName="glance-log" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.631718 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cded555-d21d-4dac-9806-a6e1071683c1" containerName="glance-log" Dec 10 07:10:21 crc kubenswrapper[4765]: E1210 07:10:21.631756 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cded555-d21d-4dac-9806-a6e1071683c1" containerName="glance-httpd" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.631763 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cded555-d21d-4dac-9806-a6e1071683c1" containerName="glance-httpd" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.631955 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cded555-d21d-4dac-9806-a6e1071683c1" containerName="glance-httpd" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.631975 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cded555-d21d-4dac-9806-a6e1071683c1" containerName="glance-log" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.633061 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.645567 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.646762 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.659661 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.735217 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.735314 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7jlp\" (UniqueName: \"kubernetes.io/projected/b1099ee9-e4d7-496f-b35e-7617ee456898-kube-api-access-j7jlp\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.735354 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.735410 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-config-data\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.735484 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-logs\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.735527 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.735603 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.735639 4765 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-scripts\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.838156 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.838544 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7jlp\" (UniqueName: \"kubernetes.io/projected/b1099ee9-e4d7-496f-b35e-7617ee456898-kube-api-access-j7jlp\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.838587 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.838632 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-config-data\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.838672 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-logs\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.838689 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.838739 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.838775 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-scripts\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.840155 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.840280 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-logs\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.841965 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.848669 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-scripts\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.853707 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.861702 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-config-data\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.863985 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7jlp\" (UniqueName: \"kubernetes.io/projected/b1099ee9-e4d7-496f-b35e-7617ee456898-kube-api-access-j7jlp\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.864698 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.914898 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " pod="openstack/glance-default-external-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.934669 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:21 crc kubenswrapper[4765]: I1210 07:10:21.973943 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.044349 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-combined-ca-bundle\") pod \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.044430 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-config-data\") pod \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.044574 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.044650 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp9vj\" (UniqueName: \"kubernetes.io/projected/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-kube-api-access-hp9vj\") pod \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.044731 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-internal-tls-certs\") pod \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.044781 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-httpd-run\") pod \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.044888 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-logs\") pod \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.044926 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-scripts\") pod \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\" (UID: \"e13fb850-ffa8-4878-a9c0-4b3e573e8f05\") " Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.054327 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e13fb850-ffa8-4878-a9c0-4b3e573e8f05" (UID: "e13fb850-ffa8-4878-a9c0-4b3e573e8f05"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.056474 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-kube-api-access-hp9vj" (OuterVolumeSpecName: "kube-api-access-hp9vj") pod "e13fb850-ffa8-4878-a9c0-4b3e573e8f05" (UID: "e13fb850-ffa8-4878-a9c0-4b3e573e8f05"). InnerVolumeSpecName "kube-api-access-hp9vj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.060491 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-scripts" (OuterVolumeSpecName: "scripts") pod "e13fb850-ffa8-4878-a9c0-4b3e573e8f05" (UID: "e13fb850-ffa8-4878-a9c0-4b3e573e8f05"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.061183 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-logs" (OuterVolumeSpecName: "logs") pod "e13fb850-ffa8-4878-a9c0-4b3e573e8f05" (UID: "e13fb850-ffa8-4878-a9c0-4b3e573e8f05"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.076944 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "e13fb850-ffa8-4878-a9c0-4b3e573e8f05" (UID: "e13fb850-ffa8-4878-a9c0-4b3e573e8f05"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.122840 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e13fb850-ffa8-4878-a9c0-4b3e573e8f05" (UID: "e13fb850-ffa8-4878-a9c0-4b3e573e8f05"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.146750 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-config-data" (OuterVolumeSpecName: "config-data") pod "e13fb850-ffa8-4878-a9c0-4b3e573e8f05" (UID: "e13fb850-ffa8-4878-a9c0-4b3e573e8f05"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.149211 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.149352 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp9vj\" (UniqueName: \"kubernetes.io/projected/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-kube-api-access-hp9vj\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.149418 4765 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.149474 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.149539 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.149621 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.149678 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.174124 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.202971 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e13fb850-ffa8-4878-a9c0-4b3e573e8f05" (UID: "e13fb850-ffa8-4878-a9c0-4b3e573e8f05"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.254187 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.254427 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e13fb850-ffa8-4878-a9c0-4b3e573e8f05-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.293810 4765 generic.go:334] "Generic (PLEG): container finished" podID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" containerID="e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c" exitCode=0 Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.293947 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e13fb850-ffa8-4878-a9c0-4b3e573e8f05","Type":"ContainerDied","Data":"e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c"} Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.293984 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e13fb850-ffa8-4878-a9c0-4b3e573e8f05","Type":"ContainerDied","Data":"a0efed9797258a13105a3de4e32240d1adefe0bf511129d21528a8994575b0e0"} Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.294006 4765 scope.go:117] "RemoveContainer" containerID="e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.294845 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.305059 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02840a32-864e-4d6a-9deb-fffc5397653b","Type":"ContainerStarted","Data":"d4796b17aa0ab49644757eca2fad5bcfb573b8c40ef4c6b4b78436f38bd76f16"} Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.362463 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.393870 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.423071 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:10:22 crc kubenswrapper[4765]: E1210 07:10:22.423559 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" containerName="glance-httpd" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.423572 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" containerName="glance-httpd" Dec 10 07:10:22 crc kubenswrapper[4765]: E1210 07:10:22.423598 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" containerName="glance-log" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.423605 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" containerName="glance-log" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.423786 4765 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" containerName="glance-httpd" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.423796 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" containerName="glance-log" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.425244 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.435894 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.436167 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.475163 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.476642 4765 scope.go:117] "RemoveContainer" containerID="c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.563331 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.563452 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.563489 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.563527 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z787f\" (UniqueName: \"kubernetes.io/projected/48199101-c7d2-4881-98bd-53d14d7308d5-kube-api-access-z787f\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.563549 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-logs\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.563566 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " 
pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.563607 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.563641 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.582328 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.592072 4765 scope.go:117] "RemoveContainer" containerID="e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c" Dec 10 07:10:22 crc kubenswrapper[4765]: E1210 07:10:22.598630 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c\": container with ID starting with e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c not found: ID does not exist" containerID="e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.598671 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c"} err="failed to get container status \"e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c\": rpc error: code = NotFound desc = could not find container \"e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c\": container with ID starting with e1c63edc425dc442f6fd790a994ed00149e9a439553175ea45fed29700765c2c not found: ID does not exist" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.598702 4765 scope.go:117] "RemoveContainer" containerID="c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391" Dec 10 07:10:22 crc kubenswrapper[4765]: E1210 07:10:22.599741 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391\": container with ID starting with c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391 not found: ID does not exist" containerID="c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.599766 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391"} err="failed to get container status \"c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391\": rpc error: code = NotFound desc = could not find container \"c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391\": container with ID starting with c15c2f490ff8394ffe1c2fa4839597e8ec8344efc1ccaf0f0cffc344c9ae6391 not found: ID does not exist" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.603816 4765 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cded555-d21d-4dac-9806-a6e1071683c1" path="/var/lib/kubelet/pods/7cded555-d21d-4dac-9806-a6e1071683c1/volumes" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.606802 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e13fb850-ffa8-4878-a9c0-4b3e573e8f05" path="/var/lib/kubelet/pods/e13fb850-ffa8-4878-a9c0-4b3e573e8f05/volumes" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.665681 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.665853 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.665905 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.665955 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z787f\" (UniqueName: \"kubernetes.io/projected/48199101-c7d2-4881-98bd-53d14d7308d5-kube-api-access-z787f\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.665986 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-logs\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.666010 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.666075 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.666135 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 
07:10:22.667565 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.667587 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-logs\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.675178 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.675197 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.675617 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.677509 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.671338 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.693234 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z787f\" (UniqueName: \"kubernetes.io/projected/48199101-c7d2-4881-98bd-53d14d7308d5-kube-api-access-z787f\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.715003 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " pod="openstack/glance-default-internal-api-0" Dec 10 07:10:22 crc kubenswrapper[4765]: I1210 07:10:22.762829 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:10:22 crc 
kubenswrapper[4765]: I1210 07:10:22.856807 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:23 crc kubenswrapper[4765]: I1210 07:10:23.245415 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 10 07:10:23 crc kubenswrapper[4765]: I1210 07:10:23.319682 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02840a32-864e-4d6a-9deb-fffc5397653b","Type":"ContainerStarted","Data":"a832827d2751a8011390d3c13351b0e61bd3af97fecc37db2f39502eb67b9861"} Dec 10 07:10:23 crc kubenswrapper[4765]: I1210 07:10:23.330930 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1099ee9-e4d7-496f-b35e-7617ee456898","Type":"ContainerStarted","Data":"4bca801116da3647c162bffb7d23aa64d681ee0510e8f7af5ae07cbe245a43e4"} Dec 10 07:10:23 crc kubenswrapper[4765]: I1210 07:10:23.551332 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.408493 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"48199101-c7d2-4881-98bd-53d14d7308d5","Type":"ContainerStarted","Data":"381b0f39c40da82fd302ed8b05b7fb0835e8579bf2ab1f78541c95f23b62aade"} Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.443816 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1099ee9-e4d7-496f-b35e-7617ee456898","Type":"ContainerStarted","Data":"532e6ef7ce06ebf94ad58b81fb2331379d4d7d0f9d8d702b8370e40c41c43459"} Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.478354 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02840a32-864e-4d6a-9deb-fffc5397653b","Type":"ContainerStarted","Data":"d65c3a9331e9da6fed433a21d6cc45087665ff2f8aa551b25b7f1fcf846e9b96"} Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.509601 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5pfzm"] Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.511925 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.520680 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5pfzm"] Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.523671 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-7j8ws" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.523915 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.524157 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.624774 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-scripts\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.624952 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5j82\" (UniqueName: \"kubernetes.io/projected/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-kube-api-access-d5j82\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.624999 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-config-data\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.625028 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.727428 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-scripts\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.728036 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5j82\" (UniqueName: \"kubernetes.io/projected/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-kube-api-access-d5j82\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.728230 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-config-data\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: 
\"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.728272 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.737150 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.740541 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-scripts\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.749906 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5j82\" (UniqueName: \"kubernetes.io/projected/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-kube-api-access-d5j82\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.751156 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-config-data\") pod \"nova-cell0-conductor-db-sync-5pfzm\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:24 crc kubenswrapper[4765]: I1210 07:10:24.873157 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.395109 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5pfzm"] Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.496659 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"48199101-c7d2-4881-98bd-53d14d7308d5","Type":"ContainerStarted","Data":"b362cb9831e6868cc5bfea4106470f04254c397b3cd98c304dac462e618c6408"} Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.500887 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1099ee9-e4d7-496f-b35e-7617ee456898","Type":"ContainerStarted","Data":"90ca341c2978c3ac47bd6f8955762450564583c1ccb4813fdb2ffc303391ba52"} Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.504818 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5pfzm" event={"ID":"f346b982-1aa1-4398-8a1d-7171c8b9c0e4","Type":"ContainerStarted","Data":"dd0135d0ebd3a4289c90235829cbf9c8ab4cd7946984822ed719af6271f1944c"} Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.511282 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02840a32-864e-4d6a-9deb-fffc5397653b","Type":"ContainerStarted","Data":"232bf85fe1bdb2e2bc1a622edd4c663477d5437f7f353d0c4045fd6875f71b2b"} Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.511544 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="ceilometer-central-agent" containerID="cri-o://d4796b17aa0ab49644757eca2fad5bcfb573b8c40ef4c6b4b78436f38bd76f16" gracePeriod=30 Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.511731 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.511806 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="proxy-httpd" containerID="cri-o://232bf85fe1bdb2e2bc1a622edd4c663477d5437f7f353d0c4045fd6875f71b2b" gracePeriod=30 Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.511864 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="sg-core" containerID="cri-o://d65c3a9331e9da6fed433a21d6cc45087665ff2f8aa551b25b7f1fcf846e9b96" gracePeriod=30 Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.511913 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="ceilometer-notification-agent" containerID="cri-o://a832827d2751a8011390d3c13351b0e61bd3af97fecc37db2f39502eb67b9861" gracePeriod=30 Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.535905 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.535882232 podStartE2EDuration="4.535882232s" podCreationTimestamp="2025-12-10 07:10:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:25.527001499 +0000 UTC m=+1345.253666825" 
watchObservedRunningTime="2025-12-10 07:10:25.535882232 +0000 UTC m=+1345.262547548" Dec 10 07:10:25 crc kubenswrapper[4765]: I1210 07:10:25.584541 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.859135827 podStartE2EDuration="6.584511345s" podCreationTimestamp="2025-12-10 07:10:19 +0000 UTC" firstStartedPulling="2025-12-10 07:10:20.397911067 +0000 UTC m=+1340.124576383" lastFinishedPulling="2025-12-10 07:10:25.123286585 +0000 UTC m=+1344.849951901" observedRunningTime="2025-12-10 07:10:25.580077419 +0000 UTC m=+1345.306742745" watchObservedRunningTime="2025-12-10 07:10:25.584511345 +0000 UTC m=+1345.311176661" Dec 10 07:10:26 crc kubenswrapper[4765]: I1210 07:10:26.530042 4765 generic.go:334] "Generic (PLEG): container finished" podID="02840a32-864e-4d6a-9deb-fffc5397653b" containerID="d65c3a9331e9da6fed433a21d6cc45087665ff2f8aa551b25b7f1fcf846e9b96" exitCode=2 Dec 10 07:10:26 crc kubenswrapper[4765]: I1210 07:10:26.530397 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02840a32-864e-4d6a-9deb-fffc5397653b","Type":"ContainerDied","Data":"d65c3a9331e9da6fed433a21d6cc45087665ff2f8aa551b25b7f1fcf846e9b96"} Dec 10 07:10:26 crc kubenswrapper[4765]: I1210 07:10:26.530467 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02840a32-864e-4d6a-9deb-fffc5397653b","Type":"ContainerDied","Data":"a832827d2751a8011390d3c13351b0e61bd3af97fecc37db2f39502eb67b9861"} Dec 10 07:10:26 crc kubenswrapper[4765]: I1210 07:10:26.530415 4765 generic.go:334] "Generic (PLEG): container finished" podID="02840a32-864e-4d6a-9deb-fffc5397653b" containerID="a832827d2751a8011390d3c13351b0e61bd3af97fecc37db2f39502eb67b9861" exitCode=0 Dec 10 07:10:26 crc kubenswrapper[4765]: I1210 07:10:26.534450 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"48199101-c7d2-4881-98bd-53d14d7308d5","Type":"ContainerStarted","Data":"e51ededa81fc983915ee20952321442588bb91e3bed29e48234b289ecdd3cfdc"} Dec 10 07:10:26 crc kubenswrapper[4765]: I1210 07:10:26.581784 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.581764086 podStartE2EDuration="4.581764086s" podCreationTimestamp="2025-12-10 07:10:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:26.571447922 +0000 UTC m=+1346.298113238" watchObservedRunningTime="2025-12-10 07:10:26.581764086 +0000 UTC m=+1346.308429402" Dec 10 07:10:31 crc kubenswrapper[4765]: I1210 07:10:31.975466 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 10 07:10:31 crc kubenswrapper[4765]: I1210 07:10:31.976508 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 10 07:10:32 crc kubenswrapper[4765]: I1210 07:10:32.018596 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 10 07:10:32 crc kubenswrapper[4765]: I1210 07:10:32.047926 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 10 07:10:32 crc kubenswrapper[4765]: I1210 07:10:32.614614 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-external-api-0" Dec 10 07:10:32 crc kubenswrapper[4765]: I1210 07:10:32.615166 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 10 07:10:32 crc kubenswrapper[4765]: I1210 07:10:32.857274 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:32 crc kubenswrapper[4765]: I1210 07:10:32.857377 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:32 crc kubenswrapper[4765]: I1210 07:10:32.912865 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:32 crc kubenswrapper[4765]: I1210 07:10:32.917681 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:33 crc kubenswrapper[4765]: I1210 07:10:33.636686 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:33 crc kubenswrapper[4765]: I1210 07:10:33.636746 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:34 crc kubenswrapper[4765]: I1210 07:10:34.049294 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:10:34 crc kubenswrapper[4765]: I1210 07:10:34.049800 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:10:34 crc kubenswrapper[4765]: I1210 07:10:34.651380 4765 generic.go:334] "Generic (PLEG): container finished" podID="02840a32-864e-4d6a-9deb-fffc5397653b" containerID="d4796b17aa0ab49644757eca2fad5bcfb573b8c40ef4c6b4b78436f38bd76f16" exitCode=0 Dec 10 07:10:34 crc kubenswrapper[4765]: I1210 07:10:34.652538 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02840a32-864e-4d6a-9deb-fffc5397653b","Type":"ContainerDied","Data":"d4796b17aa0ab49644757eca2fad5bcfb573b8c40ef4c6b4b78436f38bd76f16"} Dec 10 07:10:35 crc kubenswrapper[4765]: I1210 07:10:35.006834 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 10 07:10:35 crc kubenswrapper[4765]: I1210 07:10:35.011388 4765 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 07:10:35 crc kubenswrapper[4765]: I1210 07:10:35.043208 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 10 07:10:35 crc kubenswrapper[4765]: I1210 07:10:35.677648 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5pfzm" event={"ID":"f346b982-1aa1-4398-8a1d-7171c8b9c0e4","Type":"ContainerStarted","Data":"cb48d7619048e7367222e91b29eb3e701b0b9b6088b3e223019ff2e827511da1"} Dec 10 07:10:35 crc kubenswrapper[4765]: I1210 07:10:35.702211 4765 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-5pfzm" podStartSLOduration=1.9129331889999999 podStartE2EDuration="11.702179552s" podCreationTimestamp="2025-12-10 07:10:24 +0000 UTC" firstStartedPulling="2025-12-10 07:10:25.427226791 +0000 UTC m=+1345.153892107" lastFinishedPulling="2025-12-10 07:10:35.216473154 +0000 UTC m=+1354.943138470" observedRunningTime="2025-12-10 07:10:35.699946948 +0000 UTC m=+1355.426612274" watchObservedRunningTime="2025-12-10 07:10:35.702179552 +0000 UTC m=+1355.428844878" Dec 10 07:10:36 crc kubenswrapper[4765]: I1210 07:10:36.039616 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:36 crc kubenswrapper[4765]: I1210 07:10:36.040459 4765 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 07:10:36 crc kubenswrapper[4765]: I1210 07:10:36.135925 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 10 07:10:48 crc kubenswrapper[4765]: I1210 07:10:48.836971 4765 generic.go:334] "Generic (PLEG): container finished" podID="f346b982-1aa1-4398-8a1d-7171c8b9c0e4" containerID="cb48d7619048e7367222e91b29eb3e701b0b9b6088b3e223019ff2e827511da1" exitCode=0 Dec 10 07:10:48 crc kubenswrapper[4765]: I1210 07:10:48.838058 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5pfzm" event={"ID":"f346b982-1aa1-4398-8a1d-7171c8b9c0e4","Type":"ContainerDied","Data":"cb48d7619048e7367222e91b29eb3e701b0b9b6088b3e223019ff2e827511da1"} Dec 10 07:10:49 crc kubenswrapper[4765]: I1210 07:10:49.809944 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.341124 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.463284 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-scripts\") pod \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.463341 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-combined-ca-bundle\") pod \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.463416 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-config-data\") pod \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.463491 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5j82\" (UniqueName: \"kubernetes.io/projected/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-kube-api-access-d5j82\") pod \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\" (UID: \"f346b982-1aa1-4398-8a1d-7171c8b9c0e4\") " Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.469664 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-scripts" (OuterVolumeSpecName: "scripts") pod "f346b982-1aa1-4398-8a1d-7171c8b9c0e4" (UID: "f346b982-1aa1-4398-8a1d-7171c8b9c0e4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.470944 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-kube-api-access-d5j82" (OuterVolumeSpecName: "kube-api-access-d5j82") pod "f346b982-1aa1-4398-8a1d-7171c8b9c0e4" (UID: "f346b982-1aa1-4398-8a1d-7171c8b9c0e4"). InnerVolumeSpecName "kube-api-access-d5j82". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.496028 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f346b982-1aa1-4398-8a1d-7171c8b9c0e4" (UID: "f346b982-1aa1-4398-8a1d-7171c8b9c0e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.496643 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-config-data" (OuterVolumeSpecName: "config-data") pod "f346b982-1aa1-4398-8a1d-7171c8b9c0e4" (UID: "f346b982-1aa1-4398-8a1d-7171c8b9c0e4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.565810 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.565844 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.565856 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.565865 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5j82\" (UniqueName: \"kubernetes.io/projected/f346b982-1aa1-4398-8a1d-7171c8b9c0e4-kube-api-access-d5j82\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.859730 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5pfzm" event={"ID":"f346b982-1aa1-4398-8a1d-7171c8b9c0e4","Type":"ContainerDied","Data":"dd0135d0ebd3a4289c90235829cbf9c8ab4cd7946984822ed719af6271f1944c"} Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.860058 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd0135d0ebd3a4289c90235829cbf9c8ab4cd7946984822ed719af6271f1944c" Dec 10 07:10:50 crc kubenswrapper[4765]: I1210 07:10:50.859769 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5pfzm" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.036878 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 10 07:10:51 crc kubenswrapper[4765]: E1210 07:10:51.037362 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f346b982-1aa1-4398-8a1d-7171c8b9c0e4" containerName="nova-cell0-conductor-db-sync" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.037381 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f346b982-1aa1-4398-8a1d-7171c8b9c0e4" containerName="nova-cell0-conductor-db-sync" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.037609 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f346b982-1aa1-4398-8a1d-7171c8b9c0e4" containerName="nova-cell0-conductor-db-sync" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.038347 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.041170 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-7j8ws" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.041612 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.047377 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.179070 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nths2\" (UniqueName: \"kubernetes.io/projected/03838926-8208-43dc-9bfd-6af312a938a4-kube-api-access-nths2\") pod \"nova-cell0-conductor-0\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") " pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.179151 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") " pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.180453 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") " pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.282117 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nths2\" (UniqueName: \"kubernetes.io/projected/03838926-8208-43dc-9bfd-6af312a938a4-kube-api-access-nths2\") pod \"nova-cell0-conductor-0\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") " pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.282170 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") " pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.282215 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") " pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.286461 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") " pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.289483 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") " pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.297681 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nths2\" (UniqueName: \"kubernetes.io/projected/03838926-8208-43dc-9bfd-6af312a938a4-kube-api-access-nths2\") pod \"nova-cell0-conductor-0\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") " pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.356827 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.640964 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.872159 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"03838926-8208-43dc-9bfd-6af312a938a4","Type":"ContainerStarted","Data":"e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607"} Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.872220 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"03838926-8208-43dc-9bfd-6af312a938a4","Type":"ContainerStarted","Data":"6059940bd5580565536056565d4b076a9a034400dafc036086624ad23a9614e6"} Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.872402 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:51 crc kubenswrapper[4765]: I1210 07:10:51.893792 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=0.893766756 podStartE2EDuration="893.766756ms" podCreationTimestamp="2025-12-10 07:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:51.891424299 +0000 UTC m=+1371.618089635" watchObservedRunningTime="2025-12-10 07:10:51.893766756 +0000 UTC m=+1371.620432082" Dec 10 07:10:55 crc kubenswrapper[4765]: I1210 07:10:55.913976 4765 generic.go:334] "Generic (PLEG): container finished" podID="02840a32-864e-4d6a-9deb-fffc5397653b" containerID="232bf85fe1bdb2e2bc1a622edd4c663477d5437f7f353d0c4045fd6875f71b2b" exitCode=137 Dec 10 07:10:55 crc kubenswrapper[4765]: I1210 07:10:55.914184 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02840a32-864e-4d6a-9deb-fffc5397653b","Type":"ContainerDied","Data":"232bf85fe1bdb2e2bc1a622edd4c663477d5437f7f353d0c4045fd6875f71b2b"} Dec 10 07:10:55 crc kubenswrapper[4765]: I1210 07:10:55.914878 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02840a32-864e-4d6a-9deb-fffc5397653b","Type":"ContainerDied","Data":"64e2e065e7ad7b8630559cde076b64494854ebe7197042671ca73b5dbb248b91"} Dec 10 07:10:55 crc kubenswrapper[4765]: I1210 07:10:55.914914 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64e2e065e7ad7b8630559cde076b64494854ebe7197042671ca73b5dbb248b91" Dec 10 07:10:55 crc kubenswrapper[4765]: I1210 07:10:55.961232 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.099643 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-combined-ca-bundle\") pod \"02840a32-864e-4d6a-9deb-fffc5397653b\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.099730 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-log-httpd\") pod \"02840a32-864e-4d6a-9deb-fffc5397653b\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.099811 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-run-httpd\") pod \"02840a32-864e-4d6a-9deb-fffc5397653b\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.099847 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-config-data\") pod \"02840a32-864e-4d6a-9deb-fffc5397653b\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.099880 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-scripts\") pod \"02840a32-864e-4d6a-9deb-fffc5397653b\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.099935 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q447s\" (UniqueName: \"kubernetes.io/projected/02840a32-864e-4d6a-9deb-fffc5397653b-kube-api-access-q447s\") pod \"02840a32-864e-4d6a-9deb-fffc5397653b\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.100048 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-ceilometer-tls-certs\") pod \"02840a32-864e-4d6a-9deb-fffc5397653b\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.100120 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-sg-core-conf-yaml\") pod \"02840a32-864e-4d6a-9deb-fffc5397653b\" (UID: \"02840a32-864e-4d6a-9deb-fffc5397653b\") " Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.100620 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "02840a32-864e-4d6a-9deb-fffc5397653b" (UID: "02840a32-864e-4d6a-9deb-fffc5397653b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.100799 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "02840a32-864e-4d6a-9deb-fffc5397653b" (UID: "02840a32-864e-4d6a-9deb-fffc5397653b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.106677 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-scripts" (OuterVolumeSpecName: "scripts") pod "02840a32-864e-4d6a-9deb-fffc5397653b" (UID: "02840a32-864e-4d6a-9deb-fffc5397653b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.107493 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02840a32-864e-4d6a-9deb-fffc5397653b-kube-api-access-q447s" (OuterVolumeSpecName: "kube-api-access-q447s") pod "02840a32-864e-4d6a-9deb-fffc5397653b" (UID: "02840a32-864e-4d6a-9deb-fffc5397653b"). InnerVolumeSpecName "kube-api-access-q447s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.128658 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "02840a32-864e-4d6a-9deb-fffc5397653b" (UID: "02840a32-864e-4d6a-9deb-fffc5397653b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.157276 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "02840a32-864e-4d6a-9deb-fffc5397653b" (UID: "02840a32-864e-4d6a-9deb-fffc5397653b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.191257 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02840a32-864e-4d6a-9deb-fffc5397653b" (UID: "02840a32-864e-4d6a-9deb-fffc5397653b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.202069 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.202114 4765 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.202125 4765 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02840a32-864e-4d6a-9deb-fffc5397653b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.202133 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.202142 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q447s\" (UniqueName: \"kubernetes.io/projected/02840a32-864e-4d6a-9deb-fffc5397653b-kube-api-access-q447s\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.202153 4765 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.202161 4765 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.203554 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-config-data" (OuterVolumeSpecName: "config-data") pod "02840a32-864e-4d6a-9deb-fffc5397653b" (UID: "02840a32-864e-4d6a-9deb-fffc5397653b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.303868 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02840a32-864e-4d6a-9deb-fffc5397653b-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.385704 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.852323 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-q9kp5"] Dec 10 07:10:56 crc kubenswrapper[4765]: E1210 07:10:56.852794 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="ceilometer-notification-agent" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.852809 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="ceilometer-notification-agent" Dec 10 07:10:56 crc kubenswrapper[4765]: E1210 07:10:56.852826 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="ceilometer-central-agent" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.852834 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="ceilometer-central-agent" Dec 10 07:10:56 crc kubenswrapper[4765]: E1210 07:10:56.852845 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="proxy-httpd" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.852851 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="proxy-httpd" Dec 10 07:10:56 crc kubenswrapper[4765]: E1210 07:10:56.852864 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="sg-core" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.852870 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="sg-core" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.853045 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="ceilometer-notification-agent" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.853065 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="sg-core" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.853078 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="ceilometer-central-agent" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.853101 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" containerName="proxy-httpd" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.853776 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.855949 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.859807 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.868600 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-q9kp5"] Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.928479 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.974060 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:56 crc kubenswrapper[4765]: I1210 07:10:56.996046 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.007237 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.011485 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.014027 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.014660 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.018058 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.038709 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-config-data\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.038753 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.038777 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-scripts\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.038837 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqg6g\" (UniqueName: \"kubernetes.io/projected/4493c86d-6e67-409c-84f8-7285522e1580-kube-api-access-hqg6g\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.038859 4765 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.038880 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-scripts\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.038927 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-config-data\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.038959 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-log-httpd\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.038980 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ws6fc\" (UniqueName: \"kubernetes.io/projected/cb32e2e3-4e02-451f-9516-20da84ab8f6f-kube-api-access-ws6fc\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.039023 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.039043 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.039067 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-run-httpd\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.040759 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.119122 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.120643 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.126564 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.135495 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.137856 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142359 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142422 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142470 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-run-httpd\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142494 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-config-data\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142518 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142539 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-scripts\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142617 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqg6g\" (UniqueName: \"kubernetes.io/projected/4493c86d-6e67-409c-84f8-7285522e1580-kube-api-access-hqg6g\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142651 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc 
kubenswrapper[4765]: I1210 07:10:57.142682 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-scripts\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142771 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-config-data\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142841 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-log-httpd\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.142878 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ws6fc\" (UniqueName: \"kubernetes.io/projected/cb32e2e3-4e02-451f-9516-20da84ab8f6f-kube-api-access-ws6fc\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.148891 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-run-httpd\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.150173 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-log-httpd\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.150768 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.157657 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.176040 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.177010 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.178355 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.181113 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.184628 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-scripts\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.186884 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws6fc\" (UniqueName: \"kubernetes.io/projected/cb32e2e3-4e02-451f-9516-20da84ab8f6f-kube-api-access-ws6fc\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.187820 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-config-data\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.187860 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-scripts\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.193798 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.194864 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.194980 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-config-data\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.201271 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.213639 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqg6g\" (UniqueName: \"kubernetes.io/projected/4493c86d-6e67-409c-84f8-7285522e1580-kube-api-access-hqg6g\") pod \"nova-cell0-cell-mapping-q9kp5\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 
07:10:57.231171 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.249041 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4709941-5585-494c-9b66-cfe4334ac9a6-logs\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.249139 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-config-data\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.249241 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0159e6d-87d4-4050-b843-2f4c4087d850-logs\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.249324 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nkht\" (UniqueName: \"kubernetes.io/projected/e0159e6d-87d4-4050-b843-2f4c4087d850-kube-api-access-7nkht\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.249398 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2rg6\" (UniqueName: \"kubernetes.io/projected/b4709941-5585-494c-9b66-cfe4334ac9a6-kube-api-access-s2rg6\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.249490 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-config-data\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.249562 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.249626 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.286765 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.355517 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nkht\" (UniqueName: \"kubernetes.io/projected/e0159e6d-87d4-4050-b843-2f4c4087d850-kube-api-access-7nkht\") pod \"nova-metadata-0\" (UID: 
\"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.355572 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2rg6\" (UniqueName: \"kubernetes.io/projected/b4709941-5585-494c-9b66-cfe4334ac9a6-kube-api-access-s2rg6\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.355618 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-config-data\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.355647 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.355668 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.355880 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5dh4\" (UniqueName: \"kubernetes.io/projected/1eb4872c-24d8-4311-95b3-a38f16c907ad-kube-api-access-g5dh4\") pod \"nova-scheduler-0\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.355901 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.355941 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4709941-5585-494c-9b66-cfe4334ac9a6-logs\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.355962 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-config-data\") pod \"nova-scheduler-0\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.355982 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-config-data\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.356020 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/e0159e6d-87d4-4050-b843-2f4c4087d850-logs\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.357152 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4709941-5585-494c-9b66-cfe4334ac9a6-logs\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.357749 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0159e6d-87d4-4050-b843-2f4c4087d850-logs\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.360927 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-config-data\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.377762 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.377786 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.385712 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2rg6\" (UniqueName: \"kubernetes.io/projected/b4709941-5585-494c-9b66-cfe4334ac9a6-kube-api-access-s2rg6\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.395178 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.396822 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.401595 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.402153 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.407005 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-config-data\") pod \"nova-api-0\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.437586 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.441897 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nkht\" (UniqueName: \"kubernetes.io/projected/e0159e6d-87d4-4050-b843-2f4c4087d850-kube-api-access-7nkht\") pod \"nova-metadata-0\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.442907 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.448801 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bf969cb77-2nqww"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.452261 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.458278 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.458328 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5dh4\" (UniqueName: \"kubernetes.io/projected/1eb4872c-24d8-4311-95b3-a38f16c907ad-kube-api-access-g5dh4\") pod \"nova-scheduler-0\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.458382 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-config-data\") pod \"nova-scheduler-0\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.466378 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bf969cb77-2nqww"] Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.491954 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.494444 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-config-data\") pod \"nova-scheduler-0\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " 
pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.495147 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.516393 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5dh4\" (UniqueName: \"kubernetes.io/projected/1eb4872c-24d8-4311-95b3-a38f16c907ad-kube-api-access-g5dh4\") pod \"nova-scheduler-0\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.559762 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9l82\" (UniqueName: \"kubernetes.io/projected/64681b0b-abc7-459f-858a-a1e8a8ec168c-kube-api-access-g9l82\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.559842 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-swift-storage-0\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.559870 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.559906 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-sb\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.559928 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-nb\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.559951 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bcnf\" (UniqueName: \"kubernetes.io/projected/a8e00936-c644-4afc-9d00-1c6da1f5a380-kube-api-access-9bcnf\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.559980 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-config\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.560039 4765 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-svc\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.560059 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.644906 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.656690 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.665476 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-swift-storage-0\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.665538 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.665596 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-sb\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.665627 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-nb\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.665663 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bcnf\" (UniqueName: \"kubernetes.io/projected/a8e00936-c644-4afc-9d00-1c6da1f5a380-kube-api-access-9bcnf\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.665704 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-config\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.665805 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-svc\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.665838 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.665903 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9l82\" (UniqueName: \"kubernetes.io/projected/64681b0b-abc7-459f-858a-a1e8a8ec168c-kube-api-access-g9l82\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.667403 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-nb\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.668002 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-sb\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.668624 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-svc\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.673319 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-swift-storage-0\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.684757 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-config\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.699893 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.700864 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9l82\" (UniqueName: \"kubernetes.io/projected/64681b0b-abc7-459f-858a-a1e8a8ec168c-kube-api-access-g9l82\") pod \"dnsmasq-dns-bf969cb77-2nqww\" (UID: 
\"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.704539 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.705261 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bcnf\" (UniqueName: \"kubernetes.io/projected/a8e00936-c644-4afc-9d00-1c6da1f5a380-kube-api-access-9bcnf\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.821548 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:10:57 crc kubenswrapper[4765]: I1210 07:10:57.829566 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.208819 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.347900 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.573235 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.584645 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-q9kp5"] Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.606022 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02840a32-864e-4d6a-9deb-fffc5397653b" path="/var/lib/kubelet/pods/02840a32-864e-4d6a-9deb-fffc5397653b/volumes" Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.757099 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9r7cb"] Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.758510 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.763564 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.765732 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.806686 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9r7cb"] Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.832011 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bf969cb77-2nqww"] Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.862893 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.946465 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.946542 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-scripts\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.946569 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-config-data\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.946657 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxpcr\" (UniqueName: \"kubernetes.io/projected/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-kube-api-access-lxpcr\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.964859 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.971519 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb32e2e3-4e02-451f-9516-20da84ab8f6f","Type":"ContainerStarted","Data":"ce4a6f3ade1f7c385785d89572ec8ac0e7b9df851a9527479d8144cb61d9db0a"} Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.976764 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" event={"ID":"64681b0b-abc7-459f-858a-a1e8a8ec168c","Type":"ContainerStarted","Data":"f17d874c91934404d2dede1c11b760032753b5b2dd81cd24d527434cf38a5740"} Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.982272 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q9kp5" 
event={"ID":"4493c86d-6e67-409c-84f8-7285522e1580","Type":"ContainerStarted","Data":"9d9b5a141c2983815efdaebc1eb8fd5678d1eca528232df16cb4e7e0e7ca55f0"} Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.982415 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q9kp5" event={"ID":"4493c86d-6e67-409c-84f8-7285522e1580","Type":"ContainerStarted","Data":"8ceec89de41cdf1c42c49fd1cc553770cbcd567fe575687acd728681e7d9f798"} Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.987038 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4709941-5585-494c-9b66-cfe4334ac9a6","Type":"ContainerStarted","Data":"c1c060e1ba435f3f4e2fff5d41db183314d01c51855d2c206b8003bfaa97abbb"} Dec 10 07:10:58 crc kubenswrapper[4765]: I1210 07:10:58.993683 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1eb4872c-24d8-4311-95b3-a38f16c907ad","Type":"ContainerStarted","Data":"d63323c68093c3b3b43d8ed9fe0fe6b69e0f2107671935951f81a24b9ef72fa5"} Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.004037 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a8e00936-c644-4afc-9d00-1c6da1f5a380","Type":"ContainerStarted","Data":"2419aa350ab987474156a796524b3fb26a9cfe9f0d442e4943c5ae93e4903d0e"} Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.010108 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-q9kp5" podStartSLOduration=3.010075379 podStartE2EDuration="3.010075379s" podCreationTimestamp="2025-12-10 07:10:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:10:58.997879952 +0000 UTC m=+1378.724545268" watchObservedRunningTime="2025-12-10 07:10:59.010075379 +0000 UTC m=+1378.736740695" Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.048076 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.048182 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-scripts\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.048213 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-config-data\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.048295 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxpcr\" (UniqueName: \"kubernetes.io/projected/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-kube-api-access-lxpcr\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 
Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.053346 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-scripts\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb"
Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.056785 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-config-data\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb"
Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.069306 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxpcr\" (UniqueName: \"kubernetes.io/projected/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-kube-api-access-lxpcr\") pod \"nova-cell1-conductor-db-sync-9r7cb\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " pod="openstack/nova-cell1-conductor-db-sync-9r7cb"
Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.173527 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9r7cb"
Dec 10 07:10:59 crc kubenswrapper[4765]: I1210 07:10:59.710314 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9r7cb"]
Dec 10 07:11:00 crc kubenswrapper[4765]: I1210 07:11:00.032487 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb32e2e3-4e02-451f-9516-20da84ab8f6f","Type":"ContainerStarted","Data":"c22d77d19f3e8a8901f085bd7349437f4040978e0b9c0a92681d5964ecfb7acd"}
Dec 10 07:11:00 crc kubenswrapper[4765]: I1210 07:11:00.039923 4765 generic.go:334] "Generic (PLEG): container finished" podID="64681b0b-abc7-459f-858a-a1e8a8ec168c" containerID="5b696a757ba6596f45612b80c4727d93793afe545b90da475c2245fe1f81eb6c" exitCode=0
Dec 10 07:11:00 crc kubenswrapper[4765]: I1210 07:11:00.040013 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" event={"ID":"64681b0b-abc7-459f-858a-a1e8a8ec168c","Type":"ContainerDied","Data":"5b696a757ba6596f45612b80c4727d93793afe545b90da475c2245fe1f81eb6c"}
Dec 10 07:11:00 crc kubenswrapper[4765]: I1210 07:11:00.046874 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9r7cb" event={"ID":"64581618-c8c5-4c6c-8c7f-59d8dc4150ec","Type":"ContainerStarted","Data":"67adac67b5496f9aee627d199b71d4d334bcca427efc1eca057a56f8870abe0a"}
Dec 10 07:11:00 crc kubenswrapper[4765]: I1210 07:11:00.055157 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0159e6d-87d4-4050-b843-2f4c4087d850","Type":"ContainerStarted","Data":"788642f8cece139641bf53191b3e8f97abfca8a8a917f0cfcba0a80545a58e43"}
Dec 10 07:11:01 crc kubenswrapper[4765]: I1210 07:11:01.081683 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb32e2e3-4e02-451f-9516-20da84ab8f6f","Type":"ContainerStarted","Data":"d9b93f663c97cdb81bfac4efadfe9be275d574685cef8d6fd4150ba5d80bbf90"}
Dec 10 07:11:01 crc kubenswrapper[4765]: I1210 07:11:01.089245 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" event={"ID":"64681b0b-abc7-459f-858a-a1e8a8ec168c","Type":"ContainerStarted","Data":"ffa9d3523866a55382d9fc919655059179659d86fd060a8be0e8fc5c216d66a7"}
Dec 10 07:11:01 crc kubenswrapper[4765]: I1210 07:11:01.090420 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bf969cb77-2nqww"
Dec 10 07:11:01 crc kubenswrapper[4765]: I1210 07:11:01.099734 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9r7cb" event={"ID":"64581618-c8c5-4c6c-8c7f-59d8dc4150ec","Type":"ContainerStarted","Data":"c6917a3810c494b02e66b935f5dd67320cea45af7e76831145b9736da74e9429"}
Dec 10 07:11:01 crc kubenswrapper[4765]: I1210 07:11:01.123586 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" podStartSLOduration=4.123568264 podStartE2EDuration="4.123568264s" podCreationTimestamp="2025-12-10 07:10:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:11:01.116884394 +0000 UTC m=+1380.843549710" watchObservedRunningTime="2025-12-10 07:11:01.123568264 +0000 UTC m=+1380.850233580"
Dec 10 07:11:01 crc kubenswrapper[4765]: I1210 07:11:01.139612 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-9r7cb" podStartSLOduration=3.13958052 podStartE2EDuration="3.13958052s" podCreationTimestamp="2025-12-10 07:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:11:01.131190311 +0000 UTC m=+1380.857855647" watchObservedRunningTime="2025-12-10 07:11:01.13958052 +0000 UTC m=+1380.866245836"
Dec 10 07:11:02 crc kubenswrapper[4765]: I1210 07:11:02.181833 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 10 07:11:02 crc kubenswrapper[4765]: I1210 07:11:02.211722 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 10 07:11:04 crc kubenswrapper[4765]: I1210 07:11:04.049489 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 07:11:04 crc kubenswrapper[4765]: I1210 07:11:04.049928 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 07:11:04 crc kubenswrapper[4765]: I1210 07:11:04.049981 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w"
Dec 10 07:11:04 crc kubenswrapper[4765]: I1210 07:11:04.050810 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1c86e2badb0bfc9f0eb664aadfc0af5a709c20f5327fa62e0a9911a7da8c407c"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 10 07:11:04 crc kubenswrapper[4765]: I1210 07:11:04.050863 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://1c86e2badb0bfc9f0eb664aadfc0af5a709c20f5327fa62e0a9911a7da8c407c" gracePeriod=600
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.172868 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0159e6d-87d4-4050-b843-2f4c4087d850","Type":"ContainerStarted","Data":"1639a7228c14e55d4c7a6fe3e77e7f3c51a5d803c9dbc29f520b7dfbc16b7754"}
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.173695 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0159e6d-87d4-4050-b843-2f4c4087d850","Type":"ContainerStarted","Data":"0b11bc6c16650d5dd48ff9dc9f180dfda9e34308176ebbeea7a34c70c707f924"}
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.173977 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e0159e6d-87d4-4050-b843-2f4c4087d850" containerName="nova-metadata-log" containerID="cri-o://0b11bc6c16650d5dd48ff9dc9f180dfda9e34308176ebbeea7a34c70c707f924" gracePeriod=30
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.174387 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e0159e6d-87d4-4050-b843-2f4c4087d850" containerName="nova-metadata-metadata" containerID="cri-o://1639a7228c14e55d4c7a6fe3e77e7f3c51a5d803c9dbc29f520b7dfbc16b7754" gracePeriod=30
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.180906 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb32e2e3-4e02-451f-9516-20da84ab8f6f","Type":"ContainerStarted","Data":"9b41f82c1855e8c2b81da8542fe24345f5a9c8ac84ec1d97c75c80956a103a44"}
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.196705 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="1c86e2badb0bfc9f0eb664aadfc0af5a709c20f5327fa62e0a9911a7da8c407c" exitCode=0
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.196889 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"1c86e2badb0bfc9f0eb664aadfc0af5a709c20f5327fa62e0a9911a7da8c407c"}
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.196940 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"}
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.196968 4765 scope.go:117] "RemoveContainer" containerID="8f3d21f8c3fe011f6de37bb9b8fe365dd62e648f60edb80df7c37bb446ad83d1"
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.206766 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.142454244 podStartE2EDuration="8.206736522s" podCreationTimestamp="2025-12-10 07:10:57 +0000 UTC" firstStartedPulling="2025-12-10 07:10:58.976876225 +0000 UTC m=+1378.703541541" lastFinishedPulling="2025-12-10 07:11:04.041158503 +0000 UTC m=+1383.767823819" observedRunningTime="2025-12-10 07:11:05.199917868 +0000 UTC m=+1384.926583194" watchObservedRunningTime="2025-12-10 07:11:05.206736522 +0000 UTC m=+1384.933401838"
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.210374 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4709941-5585-494c-9b66-cfe4334ac9a6","Type":"ContainerStarted","Data":"c42cd48bca5bac59fd8814f1b30b25640172692be41556b469c0cd9f8bb9a5a9"}
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.225691 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1eb4872c-24d8-4311-95b3-a38f16c907ad","Type":"ContainerStarted","Data":"c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37"}
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.232579 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a8e00936-c644-4afc-9d00-1c6da1f5a380","Type":"ContainerStarted","Data":"df242b7c160deac47abd6b9541c5e53d3fd8d19239aa5942bb1b6bc35af53e76"}
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.232831 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="a8e00936-c644-4afc-9d00-1c6da1f5a380" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://df242b7c160deac47abd6b9541c5e53d3fd8d19239aa5942bb1b6bc35af53e76" gracePeriod=30
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.251504 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.54577867 podStartE2EDuration="8.251479965s" podCreationTimestamp="2025-12-10 07:10:57 +0000 UTC" firstStartedPulling="2025-12-10 07:10:58.337477756 +0000 UTC m=+1378.064143072" lastFinishedPulling="2025-12-10 07:11:04.043179051 +0000 UTC m=+1383.769844367" observedRunningTime="2025-12-10 07:11:05.241775929 +0000 UTC m=+1384.968441245" watchObservedRunningTime="2025-12-10 07:11:05.251479965 +0000 UTC m=+1384.978145281"
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.286132 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.833174837 podStartE2EDuration="8.28610318s" podCreationTimestamp="2025-12-10 07:10:57 +0000 UTC" firstStartedPulling="2025-12-10 07:10:58.575051095 +0000 UTC m=+1378.301716411" lastFinishedPulling="2025-12-10 07:11:04.027979438 +0000 UTC m=+1383.754644754" observedRunningTime="2025-12-10 07:11:05.264254098 +0000 UTC m=+1384.990919414" watchObservedRunningTime="2025-12-10 07:11:05.28610318 +0000 UTC m=+1385.012768496"
Dec 10 07:11:05 crc kubenswrapper[4765]: I1210 07:11:05.311529 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.055522431 podStartE2EDuration="8.311505722s" podCreationTimestamp="2025-12-10 07:10:57 +0000 UTC" firstStartedPulling="2025-12-10 07:10:58.786298694 +0000 UTC m=+1378.512964010" lastFinishedPulling="2025-12-10 07:11:04.042281985 +0000 UTC m=+1383.768947301" observedRunningTime="2025-12-10 07:11:05.310616057 +0000 UTC m=+1385.037281373" watchObservedRunningTime="2025-12-10 07:11:05.311505722 +0000 UTC m=+1385.038171038"
Dec 10 07:11:06 crc kubenswrapper[4765]: I1210 07:11:06.246831 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4709941-5585-494c-9b66-cfe4334ac9a6","Type":"ContainerStarted","Data":"56617d3540237254bcc0cf30bc765a67e9884a279392fe0f2c5c8d8d2a62c35b"}
Dec 10 07:11:06 crc kubenswrapper[4765]: I1210 07:11:06.251004 4765 generic.go:334] "Generic (PLEG): container finished" podID="e0159e6d-87d4-4050-b843-2f4c4087d850" containerID="0b11bc6c16650d5dd48ff9dc9f180dfda9e34308176ebbeea7a34c70c707f924" exitCode=143
Dec 10 07:11:06 crc kubenswrapper[4765]: I1210 07:11:06.251100 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0159e6d-87d4-4050-b843-2f4c4087d850","Type":"ContainerDied","Data":"0b11bc6c16650d5dd48ff9dc9f180dfda9e34308176ebbeea7a34c70c707f924"}
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.264539 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb32e2e3-4e02-451f-9516-20da84ab8f6f","Type":"ContainerStarted","Data":"25937422f4ef3a630b37ce488a01e4ab3527e4324465f68ce84ad08278152714"}
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.444716 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.444815 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.646139 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.646210 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.657616 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.657664 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.705783 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.724980 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.70713413 podStartE2EDuration="11.72496208s" podCreationTimestamp="2025-12-10 07:10:56 +0000 UTC" firstStartedPulling="2025-12-10 07:10:58.244853751 +0000 UTC m=+1377.971519077" lastFinishedPulling="2025-12-10 07:11:06.262681701 +0000 UTC m=+1385.989347027" observedRunningTime="2025-12-10 07:11:07.293831295 +0000 UTC m=+1387.020496611" watchObservedRunningTime="2025-12-10 07:11:07.72496208 +0000 UTC m=+1387.451627396"
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.822619 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.831354 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bf969cb77-2nqww"
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.909104 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"]
Dec 10 07:11:07 crc kubenswrapper[4765]: I1210 07:11:07.909340 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn" podUID="3e62750c-eec4-43f8-afb4-8f8d8e794247" containerName="dnsmasq-dns" containerID="cri-o://fd5e20ca82ef15db6b1e7c06f3847cce05ab0c21f77382df7a18465a87dd8baa" gracePeriod=10
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.280750 4765 generic.go:334] "Generic (PLEG): container finished" podID="3e62750c-eec4-43f8-afb4-8f8d8e794247" containerID="fd5e20ca82ef15db6b1e7c06f3847cce05ab0c21f77382df7a18465a87dd8baa" exitCode=0
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.281867 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn" event={"ID":"3e62750c-eec4-43f8-afb4-8f8d8e794247","Type":"ContainerDied","Data":"fd5e20ca82ef15db6b1e7c06f3847cce05ab0c21f77382df7a18465a87dd8baa"}
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.284603 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.328486 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.529282 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.529328 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.658435 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.776837 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-nb\") pod \"3e62750c-eec4-43f8-afb4-8f8d8e794247\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") "
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.776906 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-svc\") pod \"3e62750c-eec4-43f8-afb4-8f8d8e794247\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") "
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.776946 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-swift-storage-0\") pod \"3e62750c-eec4-43f8-afb4-8f8d8e794247\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") "
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.776981 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xkhk\" (UniqueName: \"kubernetes.io/projected/3e62750c-eec4-43f8-afb4-8f8d8e794247-kube-api-access-5xkhk\") pod \"3e62750c-eec4-43f8-afb4-8f8d8e794247\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") "
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.777002 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-config\") pod \"3e62750c-eec4-43f8-afb4-8f8d8e794247\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") "
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.777161 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-sb\") pod \"3e62750c-eec4-43f8-afb4-8f8d8e794247\" (UID: \"3e62750c-eec4-43f8-afb4-8f8d8e794247\") "
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.802829 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e62750c-eec4-43f8-afb4-8f8d8e794247-kube-api-access-5xkhk" (OuterVolumeSpecName: "kube-api-access-5xkhk") pod "3e62750c-eec4-43f8-afb4-8f8d8e794247" (UID: "3e62750c-eec4-43f8-afb4-8f8d8e794247"). InnerVolumeSpecName "kube-api-access-5xkhk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.862947 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3e62750c-eec4-43f8-afb4-8f8d8e794247" (UID: "3e62750c-eec4-43f8-afb4-8f8d8e794247"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.873183 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3e62750c-eec4-43f8-afb4-8f8d8e794247" (UID: "3e62750c-eec4-43f8-afb4-8f8d8e794247"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.876740 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-config" (OuterVolumeSpecName: "config") pod "3e62750c-eec4-43f8-afb4-8f8d8e794247" (UID: "3e62750c-eec4-43f8-afb4-8f8d8e794247"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.879869 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.880359 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.880501 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xkhk\" (UniqueName: \"kubernetes.io/projected/3e62750c-eec4-43f8-afb4-8f8d8e794247-kube-api-access-5xkhk\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.880603 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.881316 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3e62750c-eec4-43f8-afb4-8f8d8e794247" (UID: "3e62750c-eec4-43f8-afb4-8f8d8e794247"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.883307 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3e62750c-eec4-43f8-afb4-8f8d8e794247" (UID: "3e62750c-eec4-43f8-afb4-8f8d8e794247"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.983213 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:08 crc kubenswrapper[4765]: I1210 07:11:08.983259 4765 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e62750c-eec4-43f8-afb4-8f8d8e794247-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:09 crc kubenswrapper[4765]: I1210 07:11:09.290607 4765 generic.go:334] "Generic (PLEG): container finished" podID="4493c86d-6e67-409c-84f8-7285522e1580" containerID="9d9b5a141c2983815efdaebc1eb8fd5678d1eca528232df16cb4e7e0e7ca55f0" exitCode=0 Dec 10 07:11:09 crc kubenswrapper[4765]: I1210 07:11:09.290694 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q9kp5" event={"ID":"4493c86d-6e67-409c-84f8-7285522e1580","Type":"ContainerDied","Data":"9d9b5a141c2983815efdaebc1eb8fd5678d1eca528232df16cb4e7e0e7ca55f0"} Dec 10 07:11:09 crc kubenswrapper[4765]: I1210 07:11:09.292622 4765 generic.go:334] "Generic (PLEG): container finished" podID="64581618-c8c5-4c6c-8c7f-59d8dc4150ec" containerID="c6917a3810c494b02e66b935f5dd67320cea45af7e76831145b9736da74e9429" exitCode=0 Dec 10 07:11:09 crc kubenswrapper[4765]: I1210 07:11:09.292683 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9r7cb" event={"ID":"64581618-c8c5-4c6c-8c7f-59d8dc4150ec","Type":"ContainerDied","Data":"c6917a3810c494b02e66b935f5dd67320cea45af7e76831145b9736da74e9429"} Dec 10 07:11:09 crc kubenswrapper[4765]: I1210 07:11:09.294624 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn" event={"ID":"3e62750c-eec4-43f8-afb4-8f8d8e794247","Type":"ContainerDied","Data":"4850f5ce5db4f67f2747bd41d8a10c093188f984445e2f8caf8ff670fadc21db"} Dec 10 07:11:09 crc kubenswrapper[4765]: I1210 07:11:09.294699 4765 scope.go:117] "RemoveContainer" containerID="fd5e20ca82ef15db6b1e7c06f3847cce05ab0c21f77382df7a18465a87dd8baa" Dec 10 07:11:09 crc kubenswrapper[4765]: I1210 07:11:09.294666 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn" Dec 10 07:11:09 crc kubenswrapper[4765]: I1210 07:11:09.331177 4765 scope.go:117] "RemoveContainer" containerID="bd38afdebf4473fd3cd8da183475f7625bd16ec0139b00453a5468d1230f11a3" Dec 10 07:11:09 crc kubenswrapper[4765]: I1210 07:11:09.380132 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"] Dec 10 07:11:09 crc kubenswrapper[4765]: I1210 07:11:09.397077 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fcdc9f4bf-cpfgn"] Dec 10 07:11:10 crc kubenswrapper[4765]: I1210 07:11:10.609373 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e62750c-eec4-43f8-afb4-8f8d8e794247" path="/var/lib/kubelet/pods/3e62750c-eec4-43f8-afb4-8f8d8e794247/volumes" Dec 10 07:11:10 crc kubenswrapper[4765]: I1210 07:11:10.840142 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:11:10 crc kubenswrapper[4765]: I1210 07:11:10.855255 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.027613 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-scripts\") pod \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.028743 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqg6g\" (UniqueName: \"kubernetes.io/projected/4493c86d-6e67-409c-84f8-7285522e1580-kube-api-access-hqg6g\") pod \"4493c86d-6e67-409c-84f8-7285522e1580\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.028786 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-scripts\") pod \"4493c86d-6e67-409c-84f8-7285522e1580\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.028890 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-combined-ca-bundle\") pod \"4493c86d-6e67-409c-84f8-7285522e1580\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.028988 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-combined-ca-bundle\") pod \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.029023 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-config-data\") pod \"4493c86d-6e67-409c-84f8-7285522e1580\" (UID: \"4493c86d-6e67-409c-84f8-7285522e1580\") " Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.029185 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-config-data\") pod \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.029271 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxpcr\" (UniqueName: \"kubernetes.io/projected/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-kube-api-access-lxpcr\") pod \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\" (UID: \"64581618-c8c5-4c6c-8c7f-59d8dc4150ec\") " Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.034286 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4493c86d-6e67-409c-84f8-7285522e1580-kube-api-access-hqg6g" (OuterVolumeSpecName: "kube-api-access-hqg6g") pod "4493c86d-6e67-409c-84f8-7285522e1580" (UID: "4493c86d-6e67-409c-84f8-7285522e1580"). InnerVolumeSpecName "kube-api-access-hqg6g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.034406 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-scripts" (OuterVolumeSpecName: "scripts") pod "64581618-c8c5-4c6c-8c7f-59d8dc4150ec" (UID: "64581618-c8c5-4c6c-8c7f-59d8dc4150ec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.034807 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-kube-api-access-lxpcr" (OuterVolumeSpecName: "kube-api-access-lxpcr") pod "64581618-c8c5-4c6c-8c7f-59d8dc4150ec" (UID: "64581618-c8c5-4c6c-8c7f-59d8dc4150ec"). InnerVolumeSpecName "kube-api-access-lxpcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.035229 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-scripts" (OuterVolumeSpecName: "scripts") pod "4493c86d-6e67-409c-84f8-7285522e1580" (UID: "4493c86d-6e67-409c-84f8-7285522e1580"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.058191 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-config-data" (OuterVolumeSpecName: "config-data") pod "64581618-c8c5-4c6c-8c7f-59d8dc4150ec" (UID: "64581618-c8c5-4c6c-8c7f-59d8dc4150ec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.071460 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-config-data" (OuterVolumeSpecName: "config-data") pod "4493c86d-6e67-409c-84f8-7285522e1580" (UID: "4493c86d-6e67-409c-84f8-7285522e1580"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.071614 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4493c86d-6e67-409c-84f8-7285522e1580" (UID: "4493c86d-6e67-409c-84f8-7285522e1580"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.071247 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64581618-c8c5-4c6c-8c7f-59d8dc4150ec" (UID: "64581618-c8c5-4c6c-8c7f-59d8dc4150ec"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.138351 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.138397 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.138406 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.138415 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxpcr\" (UniqueName: \"kubernetes.io/projected/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-kube-api-access-lxpcr\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.138426 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64581618-c8c5-4c6c-8c7f-59d8dc4150ec-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.138436 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqg6g\" (UniqueName: \"kubernetes.io/projected/4493c86d-6e67-409c-84f8-7285522e1580-kube-api-access-hqg6g\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.138445 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.138453 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4493c86d-6e67-409c-84f8-7285522e1580-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.317020 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q9kp5" event={"ID":"4493c86d-6e67-409c-84f8-7285522e1580","Type":"ContainerDied","Data":"8ceec89de41cdf1c42c49fd1cc553770cbcd567fe575687acd728681e7d9f798"} Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.317061 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ceec89de41cdf1c42c49fd1cc553770cbcd567fe575687acd728681e7d9f798" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.317057 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q9kp5" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.332298 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9r7cb" event={"ID":"64581618-c8c5-4c6c-8c7f-59d8dc4150ec","Type":"ContainerDied","Data":"67adac67b5496f9aee627d199b71d4d334bcca427efc1eca057a56f8870abe0a"} Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.332336 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67adac67b5496f9aee627d199b71d4d334bcca427efc1eca057a56f8870abe0a" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.332375 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9r7cb" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.419348 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 07:11:11 crc kubenswrapper[4765]: E1210 07:11:11.419855 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4493c86d-6e67-409c-84f8-7285522e1580" containerName="nova-manage" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.419879 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4493c86d-6e67-409c-84f8-7285522e1580" containerName="nova-manage" Dec 10 07:11:11 crc kubenswrapper[4765]: E1210 07:11:11.419902 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e62750c-eec4-43f8-afb4-8f8d8e794247" containerName="init" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.419911 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e62750c-eec4-43f8-afb4-8f8d8e794247" containerName="init" Dec 10 07:11:11 crc kubenswrapper[4765]: E1210 07:11:11.419937 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64581618-c8c5-4c6c-8c7f-59d8dc4150ec" containerName="nova-cell1-conductor-db-sync" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.419944 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="64581618-c8c5-4c6c-8c7f-59d8dc4150ec" containerName="nova-cell1-conductor-db-sync" Dec 10 07:11:11 crc kubenswrapper[4765]: E1210 07:11:11.419958 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e62750c-eec4-43f8-afb4-8f8d8e794247" containerName="dnsmasq-dns" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.419965 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e62750c-eec4-43f8-afb4-8f8d8e794247" containerName="dnsmasq-dns" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.420228 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="4493c86d-6e67-409c-84f8-7285522e1580" containerName="nova-manage" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.420251 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e62750c-eec4-43f8-afb4-8f8d8e794247" containerName="dnsmasq-dns" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.420271 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="64581618-c8c5-4c6c-8c7f-59d8dc4150ec" containerName="nova-cell1-conductor-db-sync" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.420997 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.425445 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.435873 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.531300 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.531622 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerName="nova-api-log" containerID="cri-o://c42cd48bca5bac59fd8814f1b30b25640172692be41556b469c0cd9f8bb9a5a9" gracePeriod=30 Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.531769 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerName="nova-api-api" containerID="cri-o://56617d3540237254bcc0cf30bc765a67e9884a279392fe0f2c5c8d8d2a62c35b" gracePeriod=30 Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.541968 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.542197 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1eb4872c-24d8-4311-95b3-a38f16c907ad" containerName="nova-scheduler-scheduler" containerID="cri-o://c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37" gracePeriod=30 Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.546213 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") " pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.546381 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2c26\" (UniqueName: \"kubernetes.io/projected/7694f523-adf7-4964-b475-6cd94cac7d75-kube-api-access-c2c26\") pod \"nova-cell1-conductor-0\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") " pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.546477 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") " pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.648879 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") " pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.649017 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2c26\" (UniqueName: 
\"kubernetes.io/projected/7694f523-adf7-4964-b475-6cd94cac7d75-kube-api-access-c2c26\") pod \"nova-cell1-conductor-0\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") " pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.649063 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") " pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.657629 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") " pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.671978 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") " pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.677331 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2c26\" (UniqueName: \"kubernetes.io/projected/7694f523-adf7-4964-b475-6cd94cac7d75-kube-api-access-c2c26\") pod \"nova-cell1-conductor-0\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") " pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:11 crc kubenswrapper[4765]: I1210 07:11:11.743572 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:12 crc kubenswrapper[4765]: I1210 07:11:12.385610 4765 generic.go:334] "Generic (PLEG): container finished" podID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerID="c42cd48bca5bac59fd8814f1b30b25640172692be41556b469c0cd9f8bb9a5a9" exitCode=143 Dec 10 07:11:12 crc kubenswrapper[4765]: I1210 07:11:12.386039 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4709941-5585-494c-9b66-cfe4334ac9a6","Type":"ContainerDied","Data":"c42cd48bca5bac59fd8814f1b30b25640172692be41556b469c0cd9f8bb9a5a9"} Dec 10 07:11:12 crc kubenswrapper[4765]: I1210 07:11:12.435130 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 07:11:12 crc kubenswrapper[4765]: E1210 07:11:12.659227 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37 is running failed: container process not found" containerID="c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 07:11:12 crc kubenswrapper[4765]: E1210 07:11:12.659702 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37 is running failed: container process not found" containerID="c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 07:11:12 crc kubenswrapper[4765]: E1210 07:11:12.661623 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37 is running failed: container process not found" containerID="c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 07:11:12 crc kubenswrapper[4765]: E1210 07:11:12.661686 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="1eb4872c-24d8-4311-95b3-a38f16c907ad" containerName="nova-scheduler-scheduler" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.076474 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.185066 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-config-data\") pod \"1eb4872c-24d8-4311-95b3-a38f16c907ad\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.185167 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5dh4\" (UniqueName: \"kubernetes.io/projected/1eb4872c-24d8-4311-95b3-a38f16c907ad-kube-api-access-g5dh4\") pod \"1eb4872c-24d8-4311-95b3-a38f16c907ad\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.185420 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-combined-ca-bundle\") pod \"1eb4872c-24d8-4311-95b3-a38f16c907ad\" (UID: \"1eb4872c-24d8-4311-95b3-a38f16c907ad\") " Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.191353 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eb4872c-24d8-4311-95b3-a38f16c907ad-kube-api-access-g5dh4" (OuterVolumeSpecName: "kube-api-access-g5dh4") pod "1eb4872c-24d8-4311-95b3-a38f16c907ad" (UID: "1eb4872c-24d8-4311-95b3-a38f16c907ad"). InnerVolumeSpecName "kube-api-access-g5dh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.213529 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-config-data" (OuterVolumeSpecName: "config-data") pod "1eb4872c-24d8-4311-95b3-a38f16c907ad" (UID: "1eb4872c-24d8-4311-95b3-a38f16c907ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.214776 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1eb4872c-24d8-4311-95b3-a38f16c907ad" (UID: "1eb4872c-24d8-4311-95b3-a38f16c907ad"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.287215 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.287253 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb4872c-24d8-4311-95b3-a38f16c907ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.287264 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5dh4\" (UniqueName: \"kubernetes.io/projected/1eb4872c-24d8-4311-95b3-a38f16c907ad-kube-api-access-g5dh4\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.396124 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7694f523-adf7-4964-b475-6cd94cac7d75","Type":"ContainerStarted","Data":"b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90"} Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.396173 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7694f523-adf7-4964-b475-6cd94cac7d75","Type":"ContainerStarted","Data":"d13f473a0de67d93c2d59c4d3e99676f932522d547214a58d1d827210a66196b"} Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.396283 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.397752 4765 generic.go:334] "Generic (PLEG): container finished" podID="1eb4872c-24d8-4311-95b3-a38f16c907ad" containerID="c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37" exitCode=0 Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.397781 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1eb4872c-24d8-4311-95b3-a38f16c907ad","Type":"ContainerDied","Data":"c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37"} Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.397802 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1eb4872c-24d8-4311-95b3-a38f16c907ad","Type":"ContainerDied","Data":"d63323c68093c3b3b43d8ed9fe0fe6b69e0f2107671935951f81a24b9ef72fa5"} Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.397821 4765 scope.go:117] "RemoveContainer" containerID="c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.398042 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.433564 4765 scope.go:117] "RemoveContainer" containerID="c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.435197 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.4351763330000002 podStartE2EDuration="2.435176333s" podCreationTimestamp="2025-12-10 07:11:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:11:13.425717664 +0000 UTC m=+1393.152382980" watchObservedRunningTime="2025-12-10 07:11:13.435176333 +0000 UTC m=+1393.161841649" Dec 10 07:11:13 crc kubenswrapper[4765]: E1210 07:11:13.456346 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37\": container with ID starting with c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37 not found: ID does not exist" containerID="c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.456670 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37"} err="failed to get container status \"c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37\": rpc error: code = NotFound desc = could not find container \"c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37\": container with ID starting with c242c9ed0837f76cb223c81ae65b10517304014eadb0c2fbd53eee5cf357ab37 not found: ID does not exist" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.467037 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.483354 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.506511 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:13 crc kubenswrapper[4765]: E1210 07:11:13.506900 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eb4872c-24d8-4311-95b3-a38f16c907ad" containerName="nova-scheduler-scheduler" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.506920 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eb4872c-24d8-4311-95b3-a38f16c907ad" containerName="nova-scheduler-scheduler" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.507148 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eb4872c-24d8-4311-95b3-a38f16c907ad" containerName="nova-scheduler-scheduler" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.507905 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.514746 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.520985 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.696836 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.696956 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcnsx\" (UniqueName: \"kubernetes.io/projected/44d47192-50c3-4b82-b4c3-8d5c36888fe4-kube-api-access-wcnsx\") pod \"nova-scheduler-0\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.697130 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-config-data\") pod \"nova-scheduler-0\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.799307 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-config-data\") pod \"nova-scheduler-0\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.799403 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.799464 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcnsx\" (UniqueName: \"kubernetes.io/projected/44d47192-50c3-4b82-b4c3-8d5c36888fe4-kube-api-access-wcnsx\") pod \"nova-scheduler-0\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.805187 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.805347 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-config-data\") pod \"nova-scheduler-0\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.816580 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcnsx\" (UniqueName: 
\"kubernetes.io/projected/44d47192-50c3-4b82-b4c3-8d5c36888fe4-kube-api-access-wcnsx\") pod \"nova-scheduler-0\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:13 crc kubenswrapper[4765]: I1210 07:11:13.835218 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:11:14 crc kubenswrapper[4765]: I1210 07:11:14.336268 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:14 crc kubenswrapper[4765]: W1210 07:11:14.340364 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44d47192_50c3_4b82_b4c3_8d5c36888fe4.slice/crio-f5dd8a19133fa98e6e51a882d2c18aae423c16be5d4077c631e6eb8221f51893 WatchSource:0}: Error finding container f5dd8a19133fa98e6e51a882d2c18aae423c16be5d4077c631e6eb8221f51893: Status 404 returned error can't find the container with id f5dd8a19133fa98e6e51a882d2c18aae423c16be5d4077c631e6eb8221f51893 Dec 10 07:11:14 crc kubenswrapper[4765]: I1210 07:11:14.411358 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"44d47192-50c3-4b82-b4c3-8d5c36888fe4","Type":"ContainerStarted","Data":"f5dd8a19133fa98e6e51a882d2c18aae423c16be5d4077c631e6eb8221f51893"} Dec 10 07:11:14 crc kubenswrapper[4765]: I1210 07:11:14.602298 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eb4872c-24d8-4311-95b3-a38f16c907ad" path="/var/lib/kubelet/pods/1eb4872c-24d8-4311-95b3-a38f16c907ad/volumes" Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.424972 4765 generic.go:334] "Generic (PLEG): container finished" podID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerID="56617d3540237254bcc0cf30bc765a67e9884a279392fe0f2c5c8d8d2a62c35b" exitCode=0 Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.425108 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4709941-5585-494c-9b66-cfe4334ac9a6","Type":"ContainerDied","Data":"56617d3540237254bcc0cf30bc765a67e9884a279392fe0f2c5c8d8d2a62c35b"} Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.426682 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"44d47192-50c3-4b82-b4c3-8d5c36888fe4","Type":"ContainerStarted","Data":"6ef96d34cd0172617e3243fbef31a88436029b89f059c3a50d3f1b091843a1be"} Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.450677 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.4506596 podStartE2EDuration="2.4506596s" podCreationTimestamp="2025-12-10 07:11:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:11:15.446752559 +0000 UTC m=+1395.173417875" watchObservedRunningTime="2025-12-10 07:11:15.4506596 +0000 UTC m=+1395.177324916" Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.768309 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.959181 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4709941-5585-494c-9b66-cfe4334ac9a6-logs\") pod \"b4709941-5585-494c-9b66-cfe4334ac9a6\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.959276 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2rg6\" (UniqueName: \"kubernetes.io/projected/b4709941-5585-494c-9b66-cfe4334ac9a6-kube-api-access-s2rg6\") pod \"b4709941-5585-494c-9b66-cfe4334ac9a6\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.959421 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-combined-ca-bundle\") pod \"b4709941-5585-494c-9b66-cfe4334ac9a6\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.959477 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-config-data\") pod \"b4709941-5585-494c-9b66-cfe4334ac9a6\" (UID: \"b4709941-5585-494c-9b66-cfe4334ac9a6\") " Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.959838 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4709941-5585-494c-9b66-cfe4334ac9a6-logs" (OuterVolumeSpecName: "logs") pod "b4709941-5585-494c-9b66-cfe4334ac9a6" (UID: "b4709941-5585-494c-9b66-cfe4334ac9a6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.960202 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4709941-5585-494c-9b66-cfe4334ac9a6-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.965678 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4709941-5585-494c-9b66-cfe4334ac9a6-kube-api-access-s2rg6" (OuterVolumeSpecName: "kube-api-access-s2rg6") pod "b4709941-5585-494c-9b66-cfe4334ac9a6" (UID: "b4709941-5585-494c-9b66-cfe4334ac9a6"). InnerVolumeSpecName "kube-api-access-s2rg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:15 crc kubenswrapper[4765]: I1210 07:11:15.991242 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-config-data" (OuterVolumeSpecName: "config-data") pod "b4709941-5585-494c-9b66-cfe4334ac9a6" (UID: "b4709941-5585-494c-9b66-cfe4334ac9a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.007119 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4709941-5585-494c-9b66-cfe4334ac9a6" (UID: "b4709941-5585-494c-9b66-cfe4334ac9a6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.062601 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.062643 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2rg6\" (UniqueName: \"kubernetes.io/projected/b4709941-5585-494c-9b66-cfe4334ac9a6-kube-api-access-s2rg6\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.062655 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4709941-5585-494c-9b66-cfe4334ac9a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.437809 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4709941-5585-494c-9b66-cfe4334ac9a6","Type":"ContainerDied","Data":"c1c060e1ba435f3f4e2fff5d41db183314d01c51855d2c206b8003bfaa97abbb"} Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.437901 4765 scope.go:117] "RemoveContainer" containerID="56617d3540237254bcc0cf30bc765a67e9884a279392fe0f2c5c8d8d2a62c35b" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.437835 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.460368 4765 scope.go:117] "RemoveContainer" containerID="c42cd48bca5bac59fd8814f1b30b25640172692be41556b469c0cd9f8bb9a5a9" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.486638 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.506425 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.517096 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:16 crc kubenswrapper[4765]: E1210 07:11:16.517598 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerName="nova-api-api" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.517619 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerName="nova-api-api" Dec 10 07:11:16 crc kubenswrapper[4765]: E1210 07:11:16.517669 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerName="nova-api-log" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.517676 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerName="nova-api-log" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.517888 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerName="nova-api-api" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.517933 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" containerName="nova-api-log" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.519193 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.526408 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.542962 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.603466 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4709941-5585-494c-9b66-cfe4334ac9a6" path="/var/lib/kubelet/pods/b4709941-5585-494c-9b66-cfe4334ac9a6/volumes" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.677471 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.677531 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-config-data\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.677552 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77xnw\" (UniqueName: \"kubernetes.io/projected/2971712d-0322-490c-8c4d-555c5a3a7edc-kube-api-access-77xnw\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.677590 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2971712d-0322-490c-8c4d-555c5a3a7edc-logs\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.779604 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.779659 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-config-data\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.779686 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77xnw\" (UniqueName: \"kubernetes.io/projected/2971712d-0322-490c-8c4d-555c5a3a7edc-kube-api-access-77xnw\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.779742 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2971712d-0322-490c-8c4d-555c5a3a7edc-logs\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 
07:11:16.782764 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2971712d-0322-490c-8c4d-555c5a3a7edc-logs\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.785885 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.786050 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-config-data\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.806816 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77xnw\" (UniqueName: \"kubernetes.io/projected/2971712d-0322-490c-8c4d-555c5a3a7edc-kube-api-access-77xnw\") pod \"nova-api-0\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " pod="openstack/nova-api-0" Dec 10 07:11:16 crc kubenswrapper[4765]: I1210 07:11:16.868314 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:11:17 crc kubenswrapper[4765]: I1210 07:11:17.316943 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:17 crc kubenswrapper[4765]: I1210 07:11:17.453500 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2971712d-0322-490c-8c4d-555c5a3a7edc","Type":"ContainerStarted","Data":"31d982dddd9abcf425f4906af15f45fd8416f7ba89bec3437465ccbcfdb4d27a"} Dec 10 07:11:18 crc kubenswrapper[4765]: I1210 07:11:18.464408 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2971712d-0322-490c-8c4d-555c5a3a7edc","Type":"ContainerStarted","Data":"442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b"} Dec 10 07:11:18 crc kubenswrapper[4765]: I1210 07:11:18.464998 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2971712d-0322-490c-8c4d-555c5a3a7edc","Type":"ContainerStarted","Data":"4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8"} Dec 10 07:11:18 crc kubenswrapper[4765]: I1210 07:11:18.485342 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.4853239990000002 podStartE2EDuration="2.485323999s" podCreationTimestamp="2025-12-10 07:11:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:11:18.480746829 +0000 UTC m=+1398.207412145" watchObservedRunningTime="2025-12-10 07:11:18.485323999 +0000 UTC m=+1398.211989315" Dec 10 07:11:18 crc kubenswrapper[4765]: I1210 07:11:18.835660 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 10 07:11:21 crc kubenswrapper[4765]: I1210 07:11:21.771336 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 10 07:11:23 crc kubenswrapper[4765]: I1210 07:11:23.835879 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/nova-scheduler-0" Dec 10 07:11:23 crc kubenswrapper[4765]: I1210 07:11:23.866921 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 10 07:11:24 crc kubenswrapper[4765]: I1210 07:11:24.545773 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 10 07:11:26 crc kubenswrapper[4765]: I1210 07:11:26.869927 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 07:11:26 crc kubenswrapper[4765]: I1210 07:11:26.870297 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 07:11:27 crc kubenswrapper[4765]: I1210 07:11:27.413180 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 10 07:11:27 crc kubenswrapper[4765]: I1210 07:11:27.953326 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 07:11:27 crc kubenswrapper[4765]: I1210 07:11:27.953536 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.619037 4765 generic.go:334] "Generic (PLEG): container finished" podID="a8e00936-c644-4afc-9d00-1c6da1f5a380" containerID="df242b7c160deac47abd6b9541c5e53d3fd8d19239aa5942bb1b6bc35af53e76" exitCode=137 Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.619129 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a8e00936-c644-4afc-9d00-1c6da1f5a380","Type":"ContainerDied","Data":"df242b7c160deac47abd6b9541c5e53d3fd8d19239aa5942bb1b6bc35af53e76"} Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.619607 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a8e00936-c644-4afc-9d00-1c6da1f5a380","Type":"ContainerDied","Data":"2419aa350ab987474156a796524b3fb26a9cfe9f0d442e4943c5ae93e4903d0e"} Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.619625 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2419aa350ab987474156a796524b3fb26a9cfe9f0d442e4943c5ae93e4903d0e" Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.621654 4765 generic.go:334] "Generic (PLEG): container finished" podID="e0159e6d-87d4-4050-b843-2f4c4087d850" containerID="1639a7228c14e55d4c7a6fe3e77e7f3c51a5d803c9dbc29f520b7dfbc16b7754" exitCode=137 Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.621695 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0159e6d-87d4-4050-b843-2f4c4087d850","Type":"ContainerDied","Data":"1639a7228c14e55d4c7a6fe3e77e7f3c51a5d803c9dbc29f520b7dfbc16b7754"} Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.648169 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.765014 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-config-data\") pod \"a8e00936-c644-4afc-9d00-1c6da1f5a380\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.765168 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-combined-ca-bundle\") pod \"a8e00936-c644-4afc-9d00-1c6da1f5a380\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.765254 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bcnf\" (UniqueName: \"kubernetes.io/projected/a8e00936-c644-4afc-9d00-1c6da1f5a380-kube-api-access-9bcnf\") pod \"a8e00936-c644-4afc-9d00-1c6da1f5a380\" (UID: \"a8e00936-c644-4afc-9d00-1c6da1f5a380\") " Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.770994 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8e00936-c644-4afc-9d00-1c6da1f5a380-kube-api-access-9bcnf" (OuterVolumeSpecName: "kube-api-access-9bcnf") pod "a8e00936-c644-4afc-9d00-1c6da1f5a380" (UID: "a8e00936-c644-4afc-9d00-1c6da1f5a380"). InnerVolumeSpecName "kube-api-access-9bcnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.803272 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-config-data" (OuterVolumeSpecName: "config-data") pod "a8e00936-c644-4afc-9d00-1c6da1f5a380" (UID: "a8e00936-c644-4afc-9d00-1c6da1f5a380"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.804944 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8e00936-c644-4afc-9d00-1c6da1f5a380" (UID: "a8e00936-c644-4afc-9d00-1c6da1f5a380"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.873493 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.873529 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e00936-c644-4afc-9d00-1c6da1f5a380-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.873546 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bcnf\" (UniqueName: \"kubernetes.io/projected/a8e00936-c644-4afc-9d00-1c6da1f5a380-kube-api-access-9bcnf\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:35 crc kubenswrapper[4765]: I1210 07:11:35.961806 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.076692 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nkht\" (UniqueName: \"kubernetes.io/projected/e0159e6d-87d4-4050-b843-2f4c4087d850-kube-api-access-7nkht\") pod \"e0159e6d-87d4-4050-b843-2f4c4087d850\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.077045 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-combined-ca-bundle\") pod \"e0159e6d-87d4-4050-b843-2f4c4087d850\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.077080 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-config-data\") pod \"e0159e6d-87d4-4050-b843-2f4c4087d850\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.077172 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0159e6d-87d4-4050-b843-2f4c4087d850-logs\") pod \"e0159e6d-87d4-4050-b843-2f4c4087d850\" (UID: \"e0159e6d-87d4-4050-b843-2f4c4087d850\") " Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.077898 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0159e6d-87d4-4050-b843-2f4c4087d850-logs" (OuterVolumeSpecName: "logs") pod "e0159e6d-87d4-4050-b843-2f4c4087d850" (UID: "e0159e6d-87d4-4050-b843-2f4c4087d850"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.078259 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0159e6d-87d4-4050-b843-2f4c4087d850-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.080685 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0159e6d-87d4-4050-b843-2f4c4087d850-kube-api-access-7nkht" (OuterVolumeSpecName: "kube-api-access-7nkht") pod "e0159e6d-87d4-4050-b843-2f4c4087d850" (UID: "e0159e6d-87d4-4050-b843-2f4c4087d850"). InnerVolumeSpecName "kube-api-access-7nkht". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.103320 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-config-data" (OuterVolumeSpecName: "config-data") pod "e0159e6d-87d4-4050-b843-2f4c4087d850" (UID: "e0159e6d-87d4-4050-b843-2f4c4087d850"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.108615 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0159e6d-87d4-4050-b843-2f4c4087d850" (UID: "e0159e6d-87d4-4050-b843-2f4c4087d850"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.180357 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nkht\" (UniqueName: \"kubernetes.io/projected/e0159e6d-87d4-4050-b843-2f4c4087d850-kube-api-access-7nkht\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.180394 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.180406 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0159e6d-87d4-4050-b843-2f4c4087d850-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.632200 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0159e6d-87d4-4050-b843-2f4c4087d850","Type":"ContainerDied","Data":"788642f8cece139641bf53191b3e8f97abfca8a8a917f0cfcba0a80545a58e43"} Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.632252 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.632278 4765 scope.go:117] "RemoveContainer" containerID="1639a7228c14e55d4c7a6fe3e77e7f3c51a5d803c9dbc29f520b7dfbc16b7754" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.632220 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.667165 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.667976 4765 scope.go:117] "RemoveContainer" containerID="0b11bc6c16650d5dd48ff9dc9f180dfda9e34308176ebbeea7a34c70c707f924" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.679512 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.693465 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.709290 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.722414 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:11:36 crc kubenswrapper[4765]: E1210 07:11:36.722994 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e00936-c644-4afc-9d00-1c6da1f5a380" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.723019 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e00936-c644-4afc-9d00-1c6da1f5a380" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 07:11:36 crc kubenswrapper[4765]: E1210 07:11:36.723055 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0159e6d-87d4-4050-b843-2f4c4087d850" containerName="nova-metadata-metadata" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.723064 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0159e6d-87d4-4050-b843-2f4c4087d850" containerName="nova-metadata-metadata" Dec 10 07:11:36 crc kubenswrapper[4765]: E1210 
07:11:36.723074 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0159e6d-87d4-4050-b843-2f4c4087d850" containerName="nova-metadata-log" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.723096 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0159e6d-87d4-4050-b843-2f4c4087d850" containerName="nova-metadata-log" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.723321 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8e00936-c644-4afc-9d00-1c6da1f5a380" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.723345 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0159e6d-87d4-4050-b843-2f4c4087d850" containerName="nova-metadata-metadata" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.723372 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0159e6d-87d4-4050-b843-2f4c4087d850" containerName="nova-metadata-log" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.724613 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.726684 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.726911 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.734552 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.736008 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.739251 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.739505 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.739862 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.747229 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.759278 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.872535 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.872589 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.872948 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.873019 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.875243 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 
07:11:36.877558 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.893053 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2d71920-5051-42a8-aada-f45a23df5463-logs\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.893873 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.894304 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.894404 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.894617 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mnv9\" (UniqueName: \"kubernetes.io/projected/c2d71920-5051-42a8-aada-f45a23df5463-kube-api-access-5mnv9\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.894843 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.894906 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bg9m\" (UniqueName: \"kubernetes.io/projected/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-kube-api-access-4bg9m\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.894960 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.895028 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-config-data\") 
pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.895205 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.999473 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:36 crc kubenswrapper[4765]: I1210 07:11:36.999921 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:36.999984 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mnv9\" (UniqueName: \"kubernetes.io/projected/c2d71920-5051-42a8-aada-f45a23df5463-kube-api-access-5mnv9\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.000035 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.000061 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bg9m\" (UniqueName: \"kubernetes.io/projected/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-kube-api-access-4bg9m\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.000095 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.000120 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-config-data\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.000157 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:37 
crc kubenswrapper[4765]: I1210 07:11:37.000186 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2d71920-5051-42a8-aada-f45a23df5463-logs\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.000203 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.002600 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2d71920-5051-42a8-aada-f45a23df5463-logs\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.013900 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.015850 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.026905 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bg9m\" (UniqueName: \"kubernetes.io/projected/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-kube-api-access-4bg9m\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.029056 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.043858 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.044283 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.046795 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-config-data\") pod \"nova-metadata-0\" 
(UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.050433 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.059711 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.107819 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mnv9\" (UniqueName: \"kubernetes.io/projected/c2d71920-5051-42a8-aada-f45a23df5463-kube-api-access-5mnv9\") pod \"nova-metadata-0\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " pod="openstack/nova-metadata-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.278985 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56c6c8bc97-9hn2v"] Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.281628 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.334206 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56c6c8bc97-9hn2v"] Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.347207 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.432799 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-nb\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.432937 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-swift-storage-0\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.433038 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-config\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.433227 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-sb\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.433334 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-svc\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.433359 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4k98d\" (UniqueName: \"kubernetes.io/projected/ea4a2d0b-62e8-4527-948f-9f9c76070af1-kube-api-access-4k98d\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.519034 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.538511 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-nb\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.538930 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-swift-storage-0\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.539231 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-config\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.539622 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-sb\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.540258 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-swift-storage-0\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.540951 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-nb\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.540954 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-config\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.541303 4765 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k98d\" (UniqueName: \"kubernetes.io/projected/ea4a2d0b-62e8-4527-948f-9f9c76070af1-kube-api-access-4k98d\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.541348 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-svc\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.542191 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-svc\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.542728 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-sb\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.581042 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jdzvb"] Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.595125 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.601125 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jdzvb"] Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.631558 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4k98d\" (UniqueName: \"kubernetes.io/projected/ea4a2d0b-62e8-4527-948f-9f9c76070af1-kube-api-access-4k98d\") pod \"dnsmasq-dns-56c6c8bc97-9hn2v\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.643876 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slsj4\" (UniqueName: \"kubernetes.io/projected/189d9632-f5f0-4a80-9eda-e431f79f2cdc-kube-api-access-slsj4\") pod \"redhat-operators-jdzvb\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.643992 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-catalog-content\") pod \"redhat-operators-jdzvb\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.644040 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-utilities\") pod \"redhat-operators-jdzvb\" (UID: 
\"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.644710 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.674528 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f9af12be-6bc5-4aa8-bb84-135e3c0727cb","Type":"ContainerStarted","Data":"dba326d00f38aea0d1f16f7db9e11e4e50e2d433fbe13674f301ff609209d76d"} Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.745369 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slsj4\" (UniqueName: \"kubernetes.io/projected/189d9632-f5f0-4a80-9eda-e431f79f2cdc-kube-api-access-slsj4\") pod \"redhat-operators-jdzvb\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.745530 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-catalog-content\") pod \"redhat-operators-jdzvb\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.745581 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-utilities\") pod \"redhat-operators-jdzvb\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.750576 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-utilities\") pod \"redhat-operators-jdzvb\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.761655 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-catalog-content\") pod \"redhat-operators-jdzvb\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:37 crc kubenswrapper[4765]: I1210 07:11:37.777862 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slsj4\" (UniqueName: \"kubernetes.io/projected/189d9632-f5f0-4a80-9eda-e431f79f2cdc-kube-api-access-slsj4\") pod \"redhat-operators-jdzvb\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.004478 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56c6c8bc97-9hn2v"] Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.020389 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.036828 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.638966 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8e00936-c644-4afc-9d00-1c6da1f5a380" path="/var/lib/kubelet/pods/a8e00936-c644-4afc-9d00-1c6da1f5a380/volumes" Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.640065 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0159e6d-87d4-4050-b843-2f4c4087d850" path="/var/lib/kubelet/pods/e0159e6d-87d4-4050-b843-2f4c4087d850/volumes" Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.640982 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jdzvb"] Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.708019 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" event={"ID":"ea4a2d0b-62e8-4527-948f-9f9c76070af1","Type":"ContainerStarted","Data":"bd271ae05f43791caf944cab99a0cb4df18082c95d8817a16dccc9c6bff3afb1"} Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.708076 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" event={"ID":"ea4a2d0b-62e8-4527-948f-9f9c76070af1","Type":"ContainerStarted","Data":"641171b1c810999c2ce70181f39975175fe8d345e816cb8977906ee989a0fe3d"} Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.712004 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d71920-5051-42a8-aada-f45a23df5463","Type":"ContainerStarted","Data":"c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b"} Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.712202 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d71920-5051-42a8-aada-f45a23df5463","Type":"ContainerStarted","Data":"556e82ba84fca4b26853a37b03009af24083a54362bc32b66be81548f4d72891"} Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.715797 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f9af12be-6bc5-4aa8-bb84-135e3c0727cb","Type":"ContainerStarted","Data":"ac0fc8ce7b9fc642344add6c6901d9f6b0f18979e1bb126414b7d8b564fa757e"} Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.731835 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzvb" event={"ID":"189d9632-f5f0-4a80-9eda-e431f79f2cdc","Type":"ContainerStarted","Data":"b5075a6a5db31c72b69a72539b14d95d6311f283ff571ea76d769ec6616c7b1d"} Dec 10 07:11:38 crc kubenswrapper[4765]: I1210 07:11:38.753617 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.753580377 podStartE2EDuration="2.753580377s" podCreationTimestamp="2025-12-10 07:11:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:11:38.738152348 +0000 UTC m=+1418.464817664" watchObservedRunningTime="2025-12-10 07:11:38.753580377 +0000 UTC m=+1418.480245693" Dec 10 07:11:39 crc kubenswrapper[4765]: I1210 07:11:39.746784 4765 generic.go:334] "Generic (PLEG): container finished" podID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" 
containerID="c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581" exitCode=0 Dec 10 07:11:39 crc kubenswrapper[4765]: I1210 07:11:39.746871 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzvb" event={"ID":"189d9632-f5f0-4a80-9eda-e431f79f2cdc","Type":"ContainerDied","Data":"c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581"} Dec 10 07:11:39 crc kubenswrapper[4765]: I1210 07:11:39.754076 4765 generic.go:334] "Generic (PLEG): container finished" podID="ea4a2d0b-62e8-4527-948f-9f9c76070af1" containerID="bd271ae05f43791caf944cab99a0cb4df18082c95d8817a16dccc9c6bff3afb1" exitCode=0 Dec 10 07:11:39 crc kubenswrapper[4765]: I1210 07:11:39.754231 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" event={"ID":"ea4a2d0b-62e8-4527-948f-9f9c76070af1","Type":"ContainerDied","Data":"bd271ae05f43791caf944cab99a0cb4df18082c95d8817a16dccc9c6bff3afb1"} Dec 10 07:11:39 crc kubenswrapper[4765]: I1210 07:11:39.758144 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d71920-5051-42a8-aada-f45a23df5463","Type":"ContainerStarted","Data":"10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712"} Dec 10 07:11:40 crc kubenswrapper[4765]: I1210 07:11:40.065193 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.065171939 podStartE2EDuration="4.065171939s" podCreationTimestamp="2025-12-10 07:11:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:11:40.058364005 +0000 UTC m=+1419.785029321" watchObservedRunningTime="2025-12-10 07:11:40.065171939 +0000 UTC m=+1419.791837255" Dec 10 07:11:40 crc kubenswrapper[4765]: I1210 07:11:40.768638 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzvb" event={"ID":"189d9632-f5f0-4a80-9eda-e431f79f2cdc","Type":"ContainerStarted","Data":"7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b"} Dec 10 07:11:40 crc kubenswrapper[4765]: I1210 07:11:40.770765 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" event={"ID":"ea4a2d0b-62e8-4527-948f-9f9c76070af1","Type":"ContainerStarted","Data":"18dc864cc764071e1ec5b63dc28acdb5ea1bab6885de1a4d32d341104a46d4bc"} Dec 10 07:11:40 crc kubenswrapper[4765]: I1210 07:11:40.771451 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:40 crc kubenswrapper[4765]: I1210 07:11:40.826515 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" podStartSLOduration=3.826490847 podStartE2EDuration="3.826490847s" podCreationTimestamp="2025-12-10 07:11:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:11:40.812988333 +0000 UTC m=+1420.539653649" watchObservedRunningTime="2025-12-10 07:11:40.826490847 +0000 UTC m=+1420.553156163" Dec 10 07:11:40 crc kubenswrapper[4765]: I1210 07:11:40.981311 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:40 crc kubenswrapper[4765]: I1210 07:11:40.981565 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" 
podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerName="nova-api-log" containerID="cri-o://4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8" gracePeriod=30 Dec 10 07:11:40 crc kubenswrapper[4765]: I1210 07:11:40.982065 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerName="nova-api-api" containerID="cri-o://442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b" gracePeriod=30 Dec 10 07:11:41 crc kubenswrapper[4765]: I1210 07:11:41.783946 4765 generic.go:334] "Generic (PLEG): container finished" podID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerID="4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8" exitCode=143 Dec 10 07:11:41 crc kubenswrapper[4765]: I1210 07:11:41.784049 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2971712d-0322-490c-8c4d-555c5a3a7edc","Type":"ContainerDied","Data":"4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8"} Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.060553 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.299764 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.300155 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="ceilometer-central-agent" containerID="cri-o://c22d77d19f3e8a8901f085bd7349437f4040978e0b9c0a92681d5964ecfb7acd" gracePeriod=30 Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.300253 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="proxy-httpd" containerID="cri-o://25937422f4ef3a630b37ce488a01e4ab3527e4324465f68ce84ad08278152714" gracePeriod=30 Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.300311 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="ceilometer-notification-agent" containerID="cri-o://d9b93f663c97cdb81bfac4efadfe9be275d574685cef8d6fd4150ba5d80bbf90" gracePeriod=30 Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.300290 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="sg-core" containerID="cri-o://9b41f82c1855e8c2b81da8542fe24345f5a9c8ac84ec1d97c75c80956a103a44" gracePeriod=30 Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.348183 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.348666 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.800268 4765 generic.go:334] "Generic (PLEG): container finished" podID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerID="25937422f4ef3a630b37ce488a01e4ab3527e4324465f68ce84ad08278152714" exitCode=0 Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.800324 4765 generic.go:334] "Generic (PLEG): container finished" podID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" 
containerID="9b41f82c1855e8c2b81da8542fe24345f5a9c8ac84ec1d97c75c80956a103a44" exitCode=2 Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.800312 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb32e2e3-4e02-451f-9516-20da84ab8f6f","Type":"ContainerDied","Data":"25937422f4ef3a630b37ce488a01e4ab3527e4324465f68ce84ad08278152714"} Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.800387 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb32e2e3-4e02-451f-9516-20da84ab8f6f","Type":"ContainerDied","Data":"9b41f82c1855e8c2b81da8542fe24345f5a9c8ac84ec1d97c75c80956a103a44"} Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.803300 4765 generic.go:334] "Generic (PLEG): container finished" podID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerID="7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b" exitCode=0 Dec 10 07:11:42 crc kubenswrapper[4765]: I1210 07:11:42.803405 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzvb" event={"ID":"189d9632-f5f0-4a80-9eda-e431f79f2cdc","Type":"ContainerDied","Data":"7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b"} Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.808450 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.827012 4765 generic.go:334] "Generic (PLEG): container finished" podID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerID="c22d77d19f3e8a8901f085bd7349437f4040978e0b9c0a92681d5964ecfb7acd" exitCode=0 Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.827117 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb32e2e3-4e02-451f-9516-20da84ab8f6f","Type":"ContainerDied","Data":"c22d77d19f3e8a8901f085bd7349437f4040978e0b9c0a92681d5964ecfb7acd"} Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.836559 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzvb" event={"ID":"189d9632-f5f0-4a80-9eda-e431f79f2cdc","Type":"ContainerStarted","Data":"c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83"} Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.839451 4765 generic.go:334] "Generic (PLEG): container finished" podID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerID="442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b" exitCode=0 Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.839500 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2971712d-0322-490c-8c4d-555c5a3a7edc","Type":"ContainerDied","Data":"442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b"} Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.839517 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.839570 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2971712d-0322-490c-8c4d-555c5a3a7edc","Type":"ContainerDied","Data":"31d982dddd9abcf425f4906af15f45fd8416f7ba89bec3437465ccbcfdb4d27a"} Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.839594 4765 scope.go:117] "RemoveContainer" containerID="442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.869447 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jdzvb" podStartSLOduration=3.546611045 podStartE2EDuration="7.869359938s" podCreationTimestamp="2025-12-10 07:11:37 +0000 UTC" firstStartedPulling="2025-12-10 07:11:39.74884562 +0000 UTC m=+1419.475510936" lastFinishedPulling="2025-12-10 07:11:44.071594513 +0000 UTC m=+1423.798259829" observedRunningTime="2025-12-10 07:11:44.867584108 +0000 UTC m=+1424.594249444" watchObservedRunningTime="2025-12-10 07:11:44.869359938 +0000 UTC m=+1424.596025254" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.905811 4765 scope.go:117] "RemoveContainer" containerID="4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.944537 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2971712d-0322-490c-8c4d-555c5a3a7edc-logs\") pod \"2971712d-0322-490c-8c4d-555c5a3a7edc\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.944846 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77xnw\" (UniqueName: \"kubernetes.io/projected/2971712d-0322-490c-8c4d-555c5a3a7edc-kube-api-access-77xnw\") pod \"2971712d-0322-490c-8c4d-555c5a3a7edc\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.944893 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-config-data\") pod \"2971712d-0322-490c-8c4d-555c5a3a7edc\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.945007 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-combined-ca-bundle\") pod \"2971712d-0322-490c-8c4d-555c5a3a7edc\" (UID: \"2971712d-0322-490c-8c4d-555c5a3a7edc\") " Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.946804 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2971712d-0322-490c-8c4d-555c5a3a7edc-logs" (OuterVolumeSpecName: "logs") pod "2971712d-0322-490c-8c4d-555c5a3a7edc" (UID: "2971712d-0322-490c-8c4d-555c5a3a7edc"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.947712 4765 scope.go:117] "RemoveContainer" containerID="442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b" Dec 10 07:11:44 crc kubenswrapper[4765]: E1210 07:11:44.948329 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b\": container with ID starting with 442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b not found: ID does not exist" containerID="442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.948361 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b"} err="failed to get container status \"442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b\": rpc error: code = NotFound desc = could not find container \"442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b\": container with ID starting with 442e596ffc0b66c42c08dfc6ea992b91560c8f009fd692eaf3868dfc8c21968b not found: ID does not exist" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.948390 4765 scope.go:117] "RemoveContainer" containerID="4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8" Dec 10 07:11:44 crc kubenswrapper[4765]: E1210 07:11:44.949823 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8\": container with ID starting with 4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8 not found: ID does not exist" containerID="4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.949850 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8"} err="failed to get container status \"4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8\": rpc error: code = NotFound desc = could not find container \"4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8\": container with ID starting with 4b487c7e3be484aba49f17357e68aab511b09c492ad3b1c03178cfad923540c8 not found: ID does not exist" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.954139 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2971712d-0322-490c-8c4d-555c5a3a7edc-kube-api-access-77xnw" (OuterVolumeSpecName: "kube-api-access-77xnw") pod "2971712d-0322-490c-8c4d-555c5a3a7edc" (UID: "2971712d-0322-490c-8c4d-555c5a3a7edc"). InnerVolumeSpecName "kube-api-access-77xnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:44 crc kubenswrapper[4765]: I1210 07:11:44.994269 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-config-data" (OuterVolumeSpecName: "config-data") pod "2971712d-0322-490c-8c4d-555c5a3a7edc" (UID: "2971712d-0322-490c-8c4d-555c5a3a7edc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.021721 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2971712d-0322-490c-8c4d-555c5a3a7edc" (UID: "2971712d-0322-490c-8c4d-555c5a3a7edc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.048191 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.048226 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2971712d-0322-490c-8c4d-555c5a3a7edc-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.048236 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77xnw\" (UniqueName: \"kubernetes.io/projected/2971712d-0322-490c-8c4d-555c5a3a7edc-kube-api-access-77xnw\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.048249 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2971712d-0322-490c-8c4d-555c5a3a7edc-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.218232 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.233599 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.254013 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:45 crc kubenswrapper[4765]: E1210 07:11:45.254491 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerName="nova-api-api" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.254509 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerName="nova-api-api" Dec 10 07:11:45 crc kubenswrapper[4765]: E1210 07:11:45.254526 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerName="nova-api-log" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.254533 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerName="nova-api-log" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.254719 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerName="nova-api-api" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.254738 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" containerName="nova-api-log" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.255779 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.258238 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.258264 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.259489 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.284923 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.354498 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-config-data\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.354550 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-public-tls-certs\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.354573 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.354592 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd082226-18a8-469e-b28a-7094701112a3-logs\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.354740 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq28m\" (UniqueName: \"kubernetes.io/projected/dd082226-18a8-469e-b28a-7094701112a3-kube-api-access-fq28m\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.354776 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.456462 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq28m\" (UniqueName: \"kubernetes.io/projected/dd082226-18a8-469e-b28a-7094701112a3-kube-api-access-fq28m\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.456896 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-combined-ca-bundle\") 
pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.457004 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-config-data\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.457049 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-public-tls-certs\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.457068 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.457085 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd082226-18a8-469e-b28a-7094701112a3-logs\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.457499 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd082226-18a8-469e-b28a-7094701112a3-logs\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.461681 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-public-tls-certs\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.463066 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-config-data\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.464784 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.471489 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.477676 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq28m\" (UniqueName: \"kubernetes.io/projected/dd082226-18a8-469e-b28a-7094701112a3-kube-api-access-fq28m\") pod \"nova-api-0\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " pod="openstack/nova-api-0" Dec 
10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.575586 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.861955 4765 generic.go:334] "Generic (PLEG): container finished" podID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerID="d9b93f663c97cdb81bfac4efadfe9be275d574685cef8d6fd4150ba5d80bbf90" exitCode=0 Dec 10 07:11:45 crc kubenswrapper[4765]: I1210 07:11:45.862440 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb32e2e3-4e02-451f-9516-20da84ab8f6f","Type":"ContainerDied","Data":"d9b93f663c97cdb81bfac4efadfe9be275d574685cef8d6fd4150ba5d80bbf90"} Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.120991 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:46 crc kubenswrapper[4765]: W1210 07:11:46.123745 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd082226_18a8_469e_b28a_7094701112a3.slice/crio-096c7579da5f4ca9abb2490dc570c0904dc206107fb34c8786e72a661fedc8d2 WatchSource:0}: Error finding container 096c7579da5f4ca9abb2490dc570c0904dc206107fb34c8786e72a661fedc8d2: Status 404 returned error can't find the container with id 096c7579da5f4ca9abb2490dc570c0904dc206107fb34c8786e72a661fedc8d2 Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.272865 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.379991 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-ceilometer-tls-certs\") pod \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.380366 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-combined-ca-bundle\") pod \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.380460 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-run-httpd\") pod \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.380524 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-log-httpd\") pod \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.380664 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-sg-core-conf-yaml\") pod \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.380705 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-scripts\") pod \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.380749 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ws6fc\" (UniqueName: \"kubernetes.io/projected/cb32e2e3-4e02-451f-9516-20da84ab8f6f-kube-api-access-ws6fc\") pod \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.380782 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-config-data\") pod \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\" (UID: \"cb32e2e3-4e02-451f-9516-20da84ab8f6f\") " Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.381580 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cb32e2e3-4e02-451f-9516-20da84ab8f6f" (UID: "cb32e2e3-4e02-451f-9516-20da84ab8f6f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.381771 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cb32e2e3-4e02-451f-9516-20da84ab8f6f" (UID: "cb32e2e3-4e02-451f-9516-20da84ab8f6f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.391351 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-scripts" (OuterVolumeSpecName: "scripts") pod "cb32e2e3-4e02-451f-9516-20da84ab8f6f" (UID: "cb32e2e3-4e02-451f-9516-20da84ab8f6f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.400716 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb32e2e3-4e02-451f-9516-20da84ab8f6f-kube-api-access-ws6fc" (OuterVolumeSpecName: "kube-api-access-ws6fc") pod "cb32e2e3-4e02-451f-9516-20da84ab8f6f" (UID: "cb32e2e3-4e02-451f-9516-20da84ab8f6f"). InnerVolumeSpecName "kube-api-access-ws6fc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.482906 4765 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.482943 4765 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb32e2e3-4e02-451f-9516-20da84ab8f6f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.482957 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.482969 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ws6fc\" (UniqueName: \"kubernetes.io/projected/cb32e2e3-4e02-451f-9516-20da84ab8f6f-kube-api-access-ws6fc\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.563470 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cb32e2e3-4e02-451f-9516-20da84ab8f6f" (UID: "cb32e2e3-4e02-451f-9516-20da84ab8f6f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.588463 4765 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.588894 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "cb32e2e3-4e02-451f-9516-20da84ab8f6f" (UID: "cb32e2e3-4e02-451f-9516-20da84ab8f6f"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.619160 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2971712d-0322-490c-8c4d-555c5a3a7edc" path="/var/lib/kubelet/pods/2971712d-0322-490c-8c4d-555c5a3a7edc/volumes" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.654351 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb32e2e3-4e02-451f-9516-20da84ab8f6f" (UID: "cb32e2e3-4e02-451f-9516-20da84ab8f6f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.657013 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-config-data" (OuterVolumeSpecName: "config-data") pod "cb32e2e3-4e02-451f-9516-20da84ab8f6f" (UID: "cb32e2e3-4e02-451f-9516-20da84ab8f6f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.693547 4765 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.693585 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.693594 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb32e2e3-4e02-451f-9516-20da84ab8f6f-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.882022 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dd082226-18a8-469e-b28a-7094701112a3","Type":"ContainerStarted","Data":"c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722"} Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.882072 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dd082226-18a8-469e-b28a-7094701112a3","Type":"ContainerStarted","Data":"cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c"} Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.882113 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dd082226-18a8-469e-b28a-7094701112a3","Type":"ContainerStarted","Data":"096c7579da5f4ca9abb2490dc570c0904dc206107fb34c8786e72a661fedc8d2"} Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.888115 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb32e2e3-4e02-451f-9516-20da84ab8f6f","Type":"ContainerDied","Data":"ce4a6f3ade1f7c385785d89572ec8ac0e7b9df851a9527479d8144cb61d9db0a"} Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.888163 4765 scope.go:117] "RemoveContainer" containerID="25937422f4ef3a630b37ce488a01e4ab3527e4324465f68ce84ad08278152714" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.888319 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.907839 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.907821108 podStartE2EDuration="1.907821108s" podCreationTimestamp="2025-12-10 07:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:11:46.904430651 +0000 UTC m=+1426.631095967" watchObservedRunningTime="2025-12-10 07:11:46.907821108 +0000 UTC m=+1426.634486424" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.941062 4765 scope.go:117] "RemoveContainer" containerID="9b41f82c1855e8c2b81da8542fe24345f5a9c8ac84ec1d97c75c80956a103a44" Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.950867 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:11:46 crc kubenswrapper[4765]: I1210 07:11:46.993291 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.040520 4765 scope.go:117] "RemoveContainer" containerID="d9b93f663c97cdb81bfac4efadfe9be275d574685cef8d6fd4150ba5d80bbf90" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.057810 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:11:47 crc kubenswrapper[4765]: E1210 07:11:47.058662 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="ceilometer-notification-agent" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.058690 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="ceilometer-notification-agent" Dec 10 07:11:47 crc kubenswrapper[4765]: E1210 07:11:47.058724 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="proxy-httpd" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.058734 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="proxy-httpd" Dec 10 07:11:47 crc kubenswrapper[4765]: E1210 07:11:47.058764 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="sg-core" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.058773 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="sg-core" Dec 10 07:11:47 crc kubenswrapper[4765]: E1210 07:11:47.058787 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="ceilometer-central-agent" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.058794 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="ceilometer-central-agent" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.059044 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="ceilometer-notification-agent" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.059068 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="ceilometer-central-agent" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.059087 4765 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="sg-core" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.059119 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" containerName="proxy-httpd" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.070363 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.070948 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.074218 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.074572 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.074747 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.108684 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.116636 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.135009 4765 scope.go:117] "RemoveContainer" containerID="c22d77d19f3e8a8901f085bd7349437f4040978e0b9c0a92681d5964ecfb7acd" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.230662 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-log-httpd\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.230871 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-config-data\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.231026 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-run-httpd\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.231119 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-scripts\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.231206 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.231494 4765 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.231762 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.231833 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm64z\" (UniqueName: \"kubernetes.io/projected/c8847d4d-d633-4ca3-90fa-3384e525864d-kube-api-access-tm64z\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.333648 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-log-httpd\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.333779 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-config-data\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.333840 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-run-httpd\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.333876 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-scripts\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.333945 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.334012 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.334038 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " 
pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.334061 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm64z\" (UniqueName: \"kubernetes.io/projected/c8847d4d-d633-4ca3-90fa-3384e525864d-kube-api-access-tm64z\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.334400 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-log-httpd\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.334667 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-run-httpd\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.340638 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.340645 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-scripts\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.342557 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.343398 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.344284 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-config-data\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.349149 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.349203 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.358656 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm64z\" (UniqueName: \"kubernetes.io/projected/c8847d4d-d633-4ca3-90fa-3384e525864d-kube-api-access-tm64z\") pod \"ceilometer-0\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " pod="openstack/ceilometer-0" Dec 10 07:11:47 crc 
kubenswrapper[4765]: I1210 07:11:47.431339 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.647240 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.722963 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bf969cb77-2nqww"] Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.723231 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" podUID="64681b0b-abc7-459f-858a-a1e8a8ec168c" containerName="dnsmasq-dns" containerID="cri-o://ffa9d3523866a55382d9fc919655059179659d86fd060a8be0e8fc5c216d66a7" gracePeriod=10 Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.831412 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" podUID="64681b0b-abc7-459f-858a-a1e8a8ec168c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.188:5353: connect: connection refused" Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.941565 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.943876 4765 generic.go:334] "Generic (PLEG): container finished" podID="64681b0b-abc7-459f-858a-a1e8a8ec168c" containerID="ffa9d3523866a55382d9fc919655059179659d86fd060a8be0e8fc5c216d66a7" exitCode=0 Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.944936 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" event={"ID":"64681b0b-abc7-459f-858a-a1e8a8ec168c","Type":"ContainerDied","Data":"ffa9d3523866a55382d9fc919655059179659d86fd060a8be0e8fc5c216d66a7"} Dec 10 07:11:47 crc kubenswrapper[4765]: W1210 07:11:47.952725 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8847d4d_d633_4ca3_90fa_3384e525864d.slice/crio-aa360b4d24812d38d9d2f1c956e002d5537c82bf7c445513720cbadc19e3682a WatchSource:0}: Error finding container aa360b4d24812d38d9d2f1c956e002d5537c82bf7c445513720cbadc19e3682a: Status 404 returned error can't find the container with id aa360b4d24812d38d9d2f1c956e002d5537c82bf7c445513720cbadc19e3682a Dec 10 07:11:47 crc kubenswrapper[4765]: I1210 07:11:47.964247 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.029624 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.029685 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.250639 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-m9wx8"] Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.252440 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.254905 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.255148 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.261076 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-m9wx8"] Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.374476 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c2d71920-5051-42a8-aada-f45a23df5463" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.374560 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c2d71920-5051-42a8-aada-f45a23df5463" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.377631 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-scripts\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.377680 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.377789 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-config-data\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.377842 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fjh5\" (UniqueName: \"kubernetes.io/projected/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-kube-api-access-6fjh5\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.453478 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.479762 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-scripts\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.479835 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.479908 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-config-data\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.479948 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fjh5\" (UniqueName: \"kubernetes.io/projected/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-kube-api-access-6fjh5\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.490072 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.496776 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-scripts\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.501589 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-config-data\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.502903 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fjh5\" (UniqueName: \"kubernetes.io/projected/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-kube-api-access-6fjh5\") pod \"nova-cell1-cell-mapping-m9wx8\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.580886 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9l82\" (UniqueName: \"kubernetes.io/projected/64681b0b-abc7-459f-858a-a1e8a8ec168c-kube-api-access-g9l82\") pod \"64681b0b-abc7-459f-858a-a1e8a8ec168c\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " Dec 10 07:11:48 crc 
kubenswrapper[4765]: I1210 07:11:48.581003 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-nb\") pod \"64681b0b-abc7-459f-858a-a1e8a8ec168c\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.581161 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-sb\") pod \"64681b0b-abc7-459f-858a-a1e8a8ec168c\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.581274 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-svc\") pod \"64681b0b-abc7-459f-858a-a1e8a8ec168c\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.581294 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-swift-storage-0\") pod \"64681b0b-abc7-459f-858a-a1e8a8ec168c\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.581330 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-config\") pod \"64681b0b-abc7-459f-858a-a1e8a8ec168c\" (UID: \"64681b0b-abc7-459f-858a-a1e8a8ec168c\") " Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.585415 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64681b0b-abc7-459f-858a-a1e8a8ec168c-kube-api-access-g9l82" (OuterVolumeSpecName: "kube-api-access-g9l82") pod "64681b0b-abc7-459f-858a-a1e8a8ec168c" (UID: "64681b0b-abc7-459f-858a-a1e8a8ec168c"). InnerVolumeSpecName "kube-api-access-g9l82". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.594996 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.611292 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb32e2e3-4e02-451f-9516-20da84ab8f6f" path="/var/lib/kubelet/pods/cb32e2e3-4e02-451f-9516-20da84ab8f6f/volumes" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.652515 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "64681b0b-abc7-459f-858a-a1e8a8ec168c" (UID: "64681b0b-abc7-459f-858a-a1e8a8ec168c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.662852 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-config" (OuterVolumeSpecName: "config") pod "64681b0b-abc7-459f-858a-a1e8a8ec168c" (UID: "64681b0b-abc7-459f-858a-a1e8a8ec168c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.662881 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "64681b0b-abc7-459f-858a-a1e8a8ec168c" (UID: "64681b0b-abc7-459f-858a-a1e8a8ec168c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.680750 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "64681b0b-abc7-459f-858a-a1e8a8ec168c" (UID: "64681b0b-abc7-459f-858a-a1e8a8ec168c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.683688 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.683730 4765 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.683742 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.683753 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9l82\" (UniqueName: \"kubernetes.io/projected/64681b0b-abc7-459f-858a-a1e8a8ec168c-kube-api-access-g9l82\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.683768 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.690561 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "64681b0b-abc7-459f-858a-a1e8a8ec168c" (UID: "64681b0b-abc7-459f-858a-a1e8a8ec168c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.786137 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64681b0b-abc7-459f-858a-a1e8a8ec168c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.972708 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8847d4d-d633-4ca3-90fa-3384e525864d","Type":"ContainerStarted","Data":"aa360b4d24812d38d9d2f1c956e002d5537c82bf7c445513720cbadc19e3682a"} Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.976774 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.986994 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf969cb77-2nqww" event={"ID":"64681b0b-abc7-459f-858a-a1e8a8ec168c","Type":"ContainerDied","Data":"f17d874c91934404d2dede1c11b760032753b5b2dd81cd24d527434cf38a5740"} Dec 10 07:11:48 crc kubenswrapper[4765]: I1210 07:11:48.987076 4765 scope.go:117] "RemoveContainer" containerID="ffa9d3523866a55382d9fc919655059179659d86fd060a8be0e8fc5c216d66a7" Dec 10 07:11:49 crc kubenswrapper[4765]: I1210 07:11:49.019887 4765 scope.go:117] "RemoveContainer" containerID="5b696a757ba6596f45612b80c4727d93793afe545b90da475c2245fe1f81eb6c" Dec 10 07:11:49 crc kubenswrapper[4765]: I1210 07:11:49.022320 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bf969cb77-2nqww"] Dec 10 07:11:49 crc kubenswrapper[4765]: I1210 07:11:49.035018 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bf969cb77-2nqww"] Dec 10 07:11:49 crc kubenswrapper[4765]: I1210 07:11:49.084124 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-m9wx8"] Dec 10 07:11:49 crc kubenswrapper[4765]: W1210 07:11:49.089162 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3bc06a2a_9f77_44e1_9b16_db768a97c7f7.slice/crio-0e990b588e279efc15fa6d6990bebd189bda7e679f0535c487b0e01a7a0ef5ab WatchSource:0}: Error finding container 0e990b588e279efc15fa6d6990bebd189bda7e679f0535c487b0e01a7a0ef5ab: Status 404 returned error can't find the container with id 0e990b588e279efc15fa6d6990bebd189bda7e679f0535c487b0e01a7a0ef5ab Dec 10 07:11:49 crc kubenswrapper[4765]: I1210 07:11:49.111750 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jdzvb" podUID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerName="registry-server" probeResult="failure" output=< Dec 10 07:11:49 crc kubenswrapper[4765]: timeout: failed to connect service ":50051" within 1s Dec 10 07:11:49 crc kubenswrapper[4765]: > Dec 10 07:11:49 crc kubenswrapper[4765]: I1210 07:11:49.986447 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-m9wx8" event={"ID":"3bc06a2a-9f77-44e1-9b16-db768a97c7f7","Type":"ContainerStarted","Data":"b256bba4f0b940452d87c1efa2568615b300a7d4a10c4b13e4fec042905b6c14"} Dec 10 07:11:49 crc kubenswrapper[4765]: I1210 07:11:49.986783 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-m9wx8" event={"ID":"3bc06a2a-9f77-44e1-9b16-db768a97c7f7","Type":"ContainerStarted","Data":"0e990b588e279efc15fa6d6990bebd189bda7e679f0535c487b0e01a7a0ef5ab"} Dec 10 07:11:49 crc kubenswrapper[4765]: I1210 07:11:49.989826 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8847d4d-d633-4ca3-90fa-3384e525864d","Type":"ContainerStarted","Data":"0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04"} Dec 10 07:11:49 crc kubenswrapper[4765]: I1210 07:11:49.989867 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8847d4d-d633-4ca3-90fa-3384e525864d","Type":"ContainerStarted","Data":"38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c"} Dec 10 07:11:50 crc kubenswrapper[4765]: I1210 07:11:50.012725 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell1-cell-mapping-m9wx8" podStartSLOduration=2.01270566 podStartE2EDuration="2.01270566s" podCreationTimestamp="2025-12-10 07:11:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:11:50.010153768 +0000 UTC m=+1429.736819084" watchObservedRunningTime="2025-12-10 07:11:50.01270566 +0000 UTC m=+1429.739370976" Dec 10 07:11:50 crc kubenswrapper[4765]: I1210 07:11:50.601679 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64681b0b-abc7-459f-858a-a1e8a8ec168c" path="/var/lib/kubelet/pods/64681b0b-abc7-459f-858a-a1e8a8ec168c/volumes" Dec 10 07:11:51 crc kubenswrapper[4765]: I1210 07:11:51.000725 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8847d4d-d633-4ca3-90fa-3384e525864d","Type":"ContainerStarted","Data":"eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e"} Dec 10 07:11:52 crc kubenswrapper[4765]: I1210 07:11:52.015235 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8847d4d-d633-4ca3-90fa-3384e525864d","Type":"ContainerStarted","Data":"5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d"} Dec 10 07:11:52 crc kubenswrapper[4765]: I1210 07:11:52.016231 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 07:11:52 crc kubenswrapper[4765]: I1210 07:11:52.045785 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.576250378 podStartE2EDuration="6.045766038s" podCreationTimestamp="2025-12-10 07:11:46 +0000 UTC" firstStartedPulling="2025-12-10 07:11:47.961009029 +0000 UTC m=+1427.687674355" lastFinishedPulling="2025-12-10 07:11:51.430524699 +0000 UTC m=+1431.157190015" observedRunningTime="2025-12-10 07:11:52.043790622 +0000 UTC m=+1431.770455938" watchObservedRunningTime="2025-12-10 07:11:52.045766038 +0000 UTC m=+1431.772431354" Dec 10 07:11:55 crc kubenswrapper[4765]: I1210 07:11:55.049150 4765 generic.go:334] "Generic (PLEG): container finished" podID="3bc06a2a-9f77-44e1-9b16-db768a97c7f7" containerID="b256bba4f0b940452d87c1efa2568615b300a7d4a10c4b13e4fec042905b6c14" exitCode=0 Dec 10 07:11:55 crc kubenswrapper[4765]: I1210 07:11:55.049230 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-m9wx8" event={"ID":"3bc06a2a-9f77-44e1-9b16-db768a97c7f7","Type":"ContainerDied","Data":"b256bba4f0b940452d87c1efa2568615b300a7d4a10c4b13e4fec042905b6c14"} Dec 10 07:11:55 crc kubenswrapper[4765]: I1210 07:11:55.575867 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 07:11:55 crc kubenswrapper[4765]: I1210 07:11:55.576233 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.450067 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.589262 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="dd082226-18a8-469e-b28a-7094701112a3" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.197:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.589277 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="dd082226-18a8-469e-b28a-7094701112a3" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.197:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.593478 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-scripts\") pod \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.593513 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-config-data\") pod \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.593545 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-combined-ca-bundle\") pod \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.593678 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fjh5\" (UniqueName: \"kubernetes.io/projected/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-kube-api-access-6fjh5\") pod \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\" (UID: \"3bc06a2a-9f77-44e1-9b16-db768a97c7f7\") " Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.605137 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-scripts" (OuterVolumeSpecName: "scripts") pod "3bc06a2a-9f77-44e1-9b16-db768a97c7f7" (UID: "3bc06a2a-9f77-44e1-9b16-db768a97c7f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.605337 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-kube-api-access-6fjh5" (OuterVolumeSpecName: "kube-api-access-6fjh5") pod "3bc06a2a-9f77-44e1-9b16-db768a97c7f7" (UID: "3bc06a2a-9f77-44e1-9b16-db768a97c7f7"). InnerVolumeSpecName "kube-api-access-6fjh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.626553 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3bc06a2a-9f77-44e1-9b16-db768a97c7f7" (UID: "3bc06a2a-9f77-44e1-9b16-db768a97c7f7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.628339 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-config-data" (OuterVolumeSpecName: "config-data") pod "3bc06a2a-9f77-44e1-9b16-db768a97c7f7" (UID: "3bc06a2a-9f77-44e1-9b16-db768a97c7f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.696058 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.696117 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.696133 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:56 crc kubenswrapper[4765]: I1210 07:11:56.696147 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fjh5\" (UniqueName: \"kubernetes.io/projected/3bc06a2a-9f77-44e1-9b16-db768a97c7f7-kube-api-access-6fjh5\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.123893 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-m9wx8" event={"ID":"3bc06a2a-9f77-44e1-9b16-db768a97c7f7","Type":"ContainerDied","Data":"0e990b588e279efc15fa6d6990bebd189bda7e679f0535c487b0e01a7a0ef5ab"} Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.123939 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e990b588e279efc15fa6d6990bebd189bda7e679f0535c487b0e01a7a0ef5ab" Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.124052 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-m9wx8" Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.262153 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.262734 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dd082226-18a8-469e-b28a-7094701112a3" containerName="nova-api-log" containerID="cri-o://cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c" gracePeriod=30 Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.262777 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dd082226-18a8-469e-b28a-7094701112a3" containerName="nova-api-api" containerID="cri-o://c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722" gracePeriod=30 Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.277491 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.277785 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="44d47192-50c3-4b82-b4c3-8d5c36888fe4" containerName="nova-scheduler-scheduler" containerID="cri-o://6ef96d34cd0172617e3243fbef31a88436029b89f059c3a50d3f1b091843a1be" gracePeriod=30 Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.325565 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.325862 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c2d71920-5051-42a8-aada-f45a23df5463" containerName="nova-metadata-log" containerID="cri-o://c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b" gracePeriod=30 Dec 10 07:11:57 crc kubenswrapper[4765]: I1210 07:11:57.325944 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c2d71920-5051-42a8-aada-f45a23df5463" containerName="nova-metadata-metadata" containerID="cri-o://10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712" gracePeriod=30 Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.071625 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.138885 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.143979 4765 generic.go:334] "Generic (PLEG): container finished" podID="dd082226-18a8-469e-b28a-7094701112a3" containerID="cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c" exitCode=143 Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.144203 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dd082226-18a8-469e-b28a-7094701112a3","Type":"ContainerDied","Data":"cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c"} Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.147235 4765 generic.go:334] "Generic (PLEG): container finished" podID="c2d71920-5051-42a8-aada-f45a23df5463" containerID="c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b" exitCode=143 Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.147306 4765 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d71920-5051-42a8-aada-f45a23df5463","Type":"ContainerDied","Data":"c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b"} Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.150641 4765 generic.go:334] "Generic (PLEG): container finished" podID="44d47192-50c3-4b82-b4c3-8d5c36888fe4" containerID="6ef96d34cd0172617e3243fbef31a88436029b89f059c3a50d3f1b091843a1be" exitCode=0 Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.151698 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"44d47192-50c3-4b82-b4c3-8d5c36888fe4","Type":"ContainerDied","Data":"6ef96d34cd0172617e3243fbef31a88436029b89f059c3a50d3f1b091843a1be"} Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.325281 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jdzvb"] Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.503327 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.646268 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcnsx\" (UniqueName: \"kubernetes.io/projected/44d47192-50c3-4b82-b4c3-8d5c36888fe4-kube-api-access-wcnsx\") pod \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.646318 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-config-data\") pod \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.646557 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-combined-ca-bundle\") pod \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\" (UID: \"44d47192-50c3-4b82-b4c3-8d5c36888fe4\") " Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.669535 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44d47192-50c3-4b82-b4c3-8d5c36888fe4-kube-api-access-wcnsx" (OuterVolumeSpecName: "kube-api-access-wcnsx") pod "44d47192-50c3-4b82-b4c3-8d5c36888fe4" (UID: "44d47192-50c3-4b82-b4c3-8d5c36888fe4"). InnerVolumeSpecName "kube-api-access-wcnsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.688394 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-config-data" (OuterVolumeSpecName: "config-data") pod "44d47192-50c3-4b82-b4c3-8d5c36888fe4" (UID: "44d47192-50c3-4b82-b4c3-8d5c36888fe4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.691907 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44d47192-50c3-4b82-b4c3-8d5c36888fe4" (UID: "44d47192-50c3-4b82-b4c3-8d5c36888fe4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.748613 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.748957 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcnsx\" (UniqueName: \"kubernetes.io/projected/44d47192-50c3-4b82-b4c3-8d5c36888fe4-kube-api-access-wcnsx\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:58 crc kubenswrapper[4765]: I1210 07:11:58.748973 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44d47192-50c3-4b82-b4c3-8d5c36888fe4-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.161860 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.161854 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"44d47192-50c3-4b82-b4c3-8d5c36888fe4","Type":"ContainerDied","Data":"f5dd8a19133fa98e6e51a882d2c18aae423c16be5d4077c631e6eb8221f51893"} Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.161929 4765 scope.go:117] "RemoveContainer" containerID="6ef96d34cd0172617e3243fbef31a88436029b89f059c3a50d3f1b091843a1be" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.162028 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jdzvb" podUID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerName="registry-server" containerID="cri-o://c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83" gracePeriod=2 Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.203213 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.217796 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.233378 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:59 crc kubenswrapper[4765]: E1210 07:11:59.233836 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64681b0b-abc7-459f-858a-a1e8a8ec168c" containerName="init" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.233856 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="64681b0b-abc7-459f-858a-a1e8a8ec168c" containerName="init" Dec 10 07:11:59 crc kubenswrapper[4765]: E1210 07:11:59.233884 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bc06a2a-9f77-44e1-9b16-db768a97c7f7" containerName="nova-manage" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.233891 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bc06a2a-9f77-44e1-9b16-db768a97c7f7" containerName="nova-manage" Dec 10 07:11:59 crc kubenswrapper[4765]: E1210 07:11:59.233904 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44d47192-50c3-4b82-b4c3-8d5c36888fe4" containerName="nova-scheduler-scheduler" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.233910 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="44d47192-50c3-4b82-b4c3-8d5c36888fe4" containerName="nova-scheduler-scheduler" Dec 10 07:11:59 crc 
kubenswrapper[4765]: E1210 07:11:59.233921 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64681b0b-abc7-459f-858a-a1e8a8ec168c" containerName="dnsmasq-dns" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.233927 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="64681b0b-abc7-459f-858a-a1e8a8ec168c" containerName="dnsmasq-dns" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.234167 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bc06a2a-9f77-44e1-9b16-db768a97c7f7" containerName="nova-manage" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.234197 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="64681b0b-abc7-459f-858a-a1e8a8ec168c" containerName="dnsmasq-dns" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.234218 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="44d47192-50c3-4b82-b4c3-8d5c36888fe4" containerName="nova-scheduler-scheduler" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.234945 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.237425 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.243284 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.361253 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.361578 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78bfd\" (UniqueName: \"kubernetes.io/projected/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-kube-api-access-78bfd\") pod \"nova-scheduler-0\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.361710 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-config-data\") pod \"nova-scheduler-0\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.463899 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-config-data\") pod \"nova-scheduler-0\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.464004 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.464183 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78bfd\" (UniqueName: 
\"kubernetes.io/projected/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-kube-api-access-78bfd\") pod \"nova-scheduler-0\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.469199 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.471854 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-config-data\") pod \"nova-scheduler-0\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.483183 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78bfd\" (UniqueName: \"kubernetes.io/projected/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-kube-api-access-78bfd\") pod \"nova-scheduler-0\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.632202 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.641516 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.769703 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-catalog-content\") pod \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.769810 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slsj4\" (UniqueName: \"kubernetes.io/projected/189d9632-f5f0-4a80-9eda-e431f79f2cdc-kube-api-access-slsj4\") pod \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.770059 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-utilities\") pod \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\" (UID: \"189d9632-f5f0-4a80-9eda-e431f79f2cdc\") " Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.772197 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-utilities" (OuterVolumeSpecName: "utilities") pod "189d9632-f5f0-4a80-9eda-e431f79f2cdc" (UID: "189d9632-f5f0-4a80-9eda-e431f79f2cdc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.776292 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/189d9632-f5f0-4a80-9eda-e431f79f2cdc-kube-api-access-slsj4" (OuterVolumeSpecName: "kube-api-access-slsj4") pod "189d9632-f5f0-4a80-9eda-e431f79f2cdc" (UID: "189d9632-f5f0-4a80-9eda-e431f79f2cdc"). InnerVolumeSpecName "kube-api-access-slsj4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.872934 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.873048 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slsj4\" (UniqueName: \"kubernetes.io/projected/189d9632-f5f0-4a80-9eda-e431f79f2cdc-kube-api-access-slsj4\") on node \"crc\" DevicePath \"\"" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.887064 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "189d9632-f5f0-4a80-9eda-e431f79f2cdc" (UID: "189d9632-f5f0-4a80-9eda-e431f79f2cdc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:11:59 crc kubenswrapper[4765]: I1210 07:11:59.975376 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/189d9632-f5f0-4a80-9eda-e431f79f2cdc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.128610 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.173738 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d11b24c2-0ac0-4f23-a575-d1b80db4ba11","Type":"ContainerStarted","Data":"25d20b42efcbb9318336c3e6f68eddac50d9caae7887edef1e9bf6045ca41622"} Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.176297 4765 generic.go:334] "Generic (PLEG): container finished" podID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerID="c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83" exitCode=0 Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.176343 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzvb" event={"ID":"189d9632-f5f0-4a80-9eda-e431f79f2cdc","Type":"ContainerDied","Data":"c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83"} Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.176366 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jdzvb" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.176386 4765 scope.go:117] "RemoveContainer" containerID="c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.176372 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdzvb" event={"ID":"189d9632-f5f0-4a80-9eda-e431f79f2cdc","Type":"ContainerDied","Data":"b5075a6a5db31c72b69a72539b14d95d6311f283ff571ea76d769ec6616c7b1d"} Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.203288 4765 scope.go:117] "RemoveContainer" containerID="7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.235400 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jdzvb"] Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.247316 4765 scope.go:117] "RemoveContainer" containerID="c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.251568 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jdzvb"] Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.275099 4765 scope.go:117] "RemoveContainer" containerID="c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83" Dec 10 07:12:00 crc kubenswrapper[4765]: E1210 07:12:00.275597 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83\": container with ID starting with c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83 not found: ID does not exist" containerID="c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.275639 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83"} err="failed to get container status \"c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83\": rpc error: code = NotFound desc = could not find container \"c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83\": container with ID starting with c042dcbdbf676416a63b08eacde076020ffb2aed98e29760a45f884ad8417a83 not found: ID does not exist" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.275667 4765 scope.go:117] "RemoveContainer" containerID="7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b" Dec 10 07:12:00 crc kubenswrapper[4765]: E1210 07:12:00.275994 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b\": container with ID starting with 7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b not found: ID does not exist" containerID="7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.276022 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b"} err="failed to get container status \"7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b\": rpc error: code = NotFound desc = could not find container 
\"7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b\": container with ID starting with 7303b41db854844fa2922dc4d1781ece6e68ea10d0891f70cb406657ea6fa73b not found: ID does not exist" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.276040 4765 scope.go:117] "RemoveContainer" containerID="c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581" Dec 10 07:12:00 crc kubenswrapper[4765]: E1210 07:12:00.276328 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581\": container with ID starting with c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581 not found: ID does not exist" containerID="c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.276357 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581"} err="failed to get container status \"c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581\": rpc error: code = NotFound desc = could not find container \"c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581\": container with ID starting with c98cb39f2737aa048a968ecbf395bca52aa3b6132e57ba68ac63d54b6968b581 not found: ID does not exist" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.602186 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" path="/var/lib/kubelet/pods/189d9632-f5f0-4a80-9eda-e431f79f2cdc/volumes" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.603456 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44d47192-50c3-4b82-b4c3-8d5c36888fe4" path="/var/lib/kubelet/pods/44d47192-50c3-4b82-b4c3-8d5c36888fe4/volumes" Dec 10 07:12:00 crc kubenswrapper[4765]: I1210 07:12:00.911621 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.012028 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-combined-ca-bundle\") pod \"c2d71920-5051-42a8-aada-f45a23df5463\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.012169 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mnv9\" (UniqueName: \"kubernetes.io/projected/c2d71920-5051-42a8-aada-f45a23df5463-kube-api-access-5mnv9\") pod \"c2d71920-5051-42a8-aada-f45a23df5463\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.012208 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2d71920-5051-42a8-aada-f45a23df5463-logs\") pod \"c2d71920-5051-42a8-aada-f45a23df5463\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.012322 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-config-data\") pod \"c2d71920-5051-42a8-aada-f45a23df5463\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.012379 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-nova-metadata-tls-certs\") pod \"c2d71920-5051-42a8-aada-f45a23df5463\" (UID: \"c2d71920-5051-42a8-aada-f45a23df5463\") " Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.012666 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2d71920-5051-42a8-aada-f45a23df5463-logs" (OuterVolumeSpecName: "logs") pod "c2d71920-5051-42a8-aada-f45a23df5463" (UID: "c2d71920-5051-42a8-aada-f45a23df5463"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.012956 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2d71920-5051-42a8-aada-f45a23df5463-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.017292 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2d71920-5051-42a8-aada-f45a23df5463-kube-api-access-5mnv9" (OuterVolumeSpecName: "kube-api-access-5mnv9") pod "c2d71920-5051-42a8-aada-f45a23df5463" (UID: "c2d71920-5051-42a8-aada-f45a23df5463"). InnerVolumeSpecName "kube-api-access-5mnv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.042691 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2d71920-5051-42a8-aada-f45a23df5463" (UID: "c2d71920-5051-42a8-aada-f45a23df5463"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.063298 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-config-data" (OuterVolumeSpecName: "config-data") pod "c2d71920-5051-42a8-aada-f45a23df5463" (UID: "c2d71920-5051-42a8-aada-f45a23df5463"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.064342 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c2d71920-5051-42a8-aada-f45a23df5463" (UID: "c2d71920-5051-42a8-aada-f45a23df5463"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.114206 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.114437 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mnv9\" (UniqueName: \"kubernetes.io/projected/c2d71920-5051-42a8-aada-f45a23df5463-kube-api-access-5mnv9\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.114451 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.114460 4765 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2d71920-5051-42a8-aada-f45a23df5463-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.194296 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d11b24c2-0ac0-4f23-a575-d1b80db4ba11","Type":"ContainerStarted","Data":"48722ecb98b55b709c43a7cab76c40a65934728cb7609e6246ba369957cdbe37"} Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.197782 4765 generic.go:334] "Generic (PLEG): container finished" podID="c2d71920-5051-42a8-aada-f45a23df5463" containerID="10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712" exitCode=0 Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.197817 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d71920-5051-42a8-aada-f45a23df5463","Type":"ContainerDied","Data":"10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712"} Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.197826 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.197852 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c2d71920-5051-42a8-aada-f45a23df5463","Type":"ContainerDied","Data":"556e82ba84fca4b26853a37b03009af24083a54362bc32b66be81548f4d72891"} Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.197871 4765 scope.go:117] "RemoveContainer" containerID="10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.236500 4765 scope.go:117] "RemoveContainer" containerID="c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.241054 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.241031763 podStartE2EDuration="2.241031763s" podCreationTimestamp="2025-12-10 07:11:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:12:01.213365076 +0000 UTC m=+1440.940030392" watchObservedRunningTime="2025-12-10 07:12:01.241031763 +0000 UTC m=+1440.967697099" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.246120 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.257645 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.264772 4765 scope.go:117] "RemoveContainer" containerID="10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712" Dec 10 07:12:01 crc kubenswrapper[4765]: E1210 07:12:01.265479 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712\": container with ID starting with 10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712 not found: ID does not exist" containerID="10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.265526 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712"} err="failed to get container status \"10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712\": rpc error: code = NotFound desc = could not find container \"10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712\": container with ID starting with 10274cb3f949c350460665760d5212cdcdd1f4de21a6f6d4dd473a591b2d9712 not found: ID does not exist" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.265553 4765 scope.go:117] "RemoveContainer" containerID="c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b" Dec 10 07:12:01 crc kubenswrapper[4765]: E1210 07:12:01.266136 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b\": container with ID starting with c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b not found: ID does not exist" containerID="c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.266190 4765 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b"} err="failed to get container status \"c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b\": rpc error: code = NotFound desc = could not find container \"c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b\": container with ID starting with c2d05fd1c4884de85935e64eb2547089375ca4199364a46ed8db39a1c61b292b not found: ID does not exist" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.269977 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:12:01 crc kubenswrapper[4765]: E1210 07:12:01.270632 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerName="extract-content" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.270655 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerName="extract-content" Dec 10 07:12:01 crc kubenswrapper[4765]: E1210 07:12:01.270673 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d71920-5051-42a8-aada-f45a23df5463" containerName="nova-metadata-log" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.270684 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d71920-5051-42a8-aada-f45a23df5463" containerName="nova-metadata-log" Dec 10 07:12:01 crc kubenswrapper[4765]: E1210 07:12:01.270702 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerName="registry-server" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.270710 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerName="registry-server" Dec 10 07:12:01 crc kubenswrapper[4765]: E1210 07:12:01.270722 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerName="extract-utilities" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.270730 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerName="extract-utilities" Dec 10 07:12:01 crc kubenswrapper[4765]: E1210 07:12:01.270744 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d71920-5051-42a8-aada-f45a23df5463" containerName="nova-metadata-metadata" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.270750 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d71920-5051-42a8-aada-f45a23df5463" containerName="nova-metadata-metadata" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.270984 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d71920-5051-42a8-aada-f45a23df5463" containerName="nova-metadata-metadata" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.271003 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="189d9632-f5f0-4a80-9eda-e431f79f2cdc" containerName="registry-server" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.271075 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d71920-5051-42a8-aada-f45a23df5463" containerName="nova-metadata-log" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.272455 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.275656 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.275827 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.281261 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.421230 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.421323 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.421353 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9cwl\" (UniqueName: \"kubernetes.io/projected/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-kube-api-access-g9cwl\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.421431 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-logs\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.421461 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-config-data\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.523172 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-config-data\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.523256 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.523319 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " 
pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.523345 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9cwl\" (UniqueName: \"kubernetes.io/projected/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-kube-api-access-g9cwl\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.523419 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-logs\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.523856 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-logs\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.528982 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.529512 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-config-data\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.529781 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.541846 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9cwl\" (UniqueName: \"kubernetes.io/projected/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-kube-api-access-g9cwl\") pod \"nova-metadata-0\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " pod="openstack/nova-metadata-0" Dec 10 07:12:01 crc kubenswrapper[4765]: I1210 07:12:01.590890 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.062139 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: W1210 07:12:02.081337 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a5f12df_98c4_4ab8_af81_e1b8f2067fcc.slice/crio-12468ee67d4418f28062a3254399c17eb47a78c5eedd56e8ee6011bccb953549 WatchSource:0}: Error finding container 12468ee67d4418f28062a3254399c17eb47a78c5eedd56e8ee6011bccb953549: Status 404 returned error can't find the container with id 12468ee67d4418f28062a3254399c17eb47a78c5eedd56e8ee6011bccb953549 Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.081932 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.227331 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc","Type":"ContainerStarted","Data":"12468ee67d4418f28062a3254399c17eb47a78c5eedd56e8ee6011bccb953549"} Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.229865 4765 generic.go:334] "Generic (PLEG): container finished" podID="dd082226-18a8-469e-b28a-7094701112a3" containerID="c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722" exitCode=0 Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.229947 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dd082226-18a8-469e-b28a-7094701112a3","Type":"ContainerDied","Data":"c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722"} Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.229952 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.229998 4765 scope.go:117] "RemoveContainer" containerID="c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.229983 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dd082226-18a8-469e-b28a-7094701112a3","Type":"ContainerDied","Data":"096c7579da5f4ca9abb2490dc570c0904dc206107fb34c8786e72a661fedc8d2"} Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.243640 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-internal-tls-certs\") pod \"dd082226-18a8-469e-b28a-7094701112a3\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.243714 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-config-data\") pod \"dd082226-18a8-469e-b28a-7094701112a3\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.243754 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-combined-ca-bundle\") pod \"dd082226-18a8-469e-b28a-7094701112a3\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.244121 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq28m\" (UniqueName: 
\"kubernetes.io/projected/dd082226-18a8-469e-b28a-7094701112a3-kube-api-access-fq28m\") pod \"dd082226-18a8-469e-b28a-7094701112a3\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.244228 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-public-tls-certs\") pod \"dd082226-18a8-469e-b28a-7094701112a3\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.244298 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd082226-18a8-469e-b28a-7094701112a3-logs\") pod \"dd082226-18a8-469e-b28a-7094701112a3\" (UID: \"dd082226-18a8-469e-b28a-7094701112a3\") " Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.244724 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd082226-18a8-469e-b28a-7094701112a3-logs" (OuterVolumeSpecName: "logs") pod "dd082226-18a8-469e-b28a-7094701112a3" (UID: "dd082226-18a8-469e-b28a-7094701112a3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.245332 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd082226-18a8-469e-b28a-7094701112a3-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.249466 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd082226-18a8-469e-b28a-7094701112a3-kube-api-access-fq28m" (OuterVolumeSpecName: "kube-api-access-fq28m") pod "dd082226-18a8-469e-b28a-7094701112a3" (UID: "dd082226-18a8-469e-b28a-7094701112a3"). InnerVolumeSpecName "kube-api-access-fq28m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.251777 4765 scope.go:117] "RemoveContainer" containerID="cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.271484 4765 scope.go:117] "RemoveContainer" containerID="c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722" Dec 10 07:12:02 crc kubenswrapper[4765]: E1210 07:12:02.271853 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722\": container with ID starting with c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722 not found: ID does not exist" containerID="c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.271928 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722"} err="failed to get container status \"c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722\": rpc error: code = NotFound desc = could not find container \"c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722\": container with ID starting with c2f77e8fde94e071cc777030ff6e5d92b09df03d32b3b8fc09589c5858266722 not found: ID does not exist" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.271957 4765 scope.go:117] "RemoveContainer" containerID="cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c" Dec 10 07:12:02 crc kubenswrapper[4765]: E1210 07:12:02.272263 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c\": container with ID starting with cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c not found: ID does not exist" containerID="cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.272302 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c"} err="failed to get container status \"cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c\": rpc error: code = NotFound desc = could not find container \"cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c\": container with ID starting with cafc595c3b12efb85ed1729282dd231efbf831beff168907f4af848da759300c not found: ID does not exist" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.272465 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-config-data" (OuterVolumeSpecName: "config-data") pod "dd082226-18a8-469e-b28a-7094701112a3" (UID: "dd082226-18a8-469e-b28a-7094701112a3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.275066 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd082226-18a8-469e-b28a-7094701112a3" (UID: "dd082226-18a8-469e-b28a-7094701112a3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.298946 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dd082226-18a8-469e-b28a-7094701112a3" (UID: "dd082226-18a8-469e-b28a-7094701112a3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.299339 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dd082226-18a8-469e-b28a-7094701112a3" (UID: "dd082226-18a8-469e-b28a-7094701112a3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.347482 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.347513 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.347522 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.347533 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq28m\" (UniqueName: \"kubernetes.io/projected/dd082226-18a8-469e-b28a-7094701112a3-kube-api-access-fq28m\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.347544 4765 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd082226-18a8-469e-b28a-7094701112a3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.622568 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2d71920-5051-42a8-aada-f45a23df5463" path="/var/lib/kubelet/pods/c2d71920-5051-42a8-aada-f45a23df5463/volumes" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.652523 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.667167 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.677185 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 10 07:12:02 crc kubenswrapper[4765]: E1210 07:12:02.677767 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd082226-18a8-469e-b28a-7094701112a3" containerName="nova-api-api" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.677792 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd082226-18a8-469e-b28a-7094701112a3" containerName="nova-api-api" Dec 10 07:12:02 crc kubenswrapper[4765]: E1210 07:12:02.677820 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd082226-18a8-469e-b28a-7094701112a3" 
containerName="nova-api-log" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.677828 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd082226-18a8-469e-b28a-7094701112a3" containerName="nova-api-log" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.678102 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd082226-18a8-469e-b28a-7094701112a3" containerName="nova-api-api" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.678129 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd082226-18a8-469e-b28a-7094701112a3" containerName="nova-api-log" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.680122 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.683454 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.683516 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.683796 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.690175 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.754588 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-public-tls-certs\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.755536 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-config-data\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.755693 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/efa8eaec-19fb-43da-a1f3-557b0847e966-logs\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.755799 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.755952 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fncrb\" (UniqueName: \"kubernetes.io/projected/efa8eaec-19fb-43da-a1f3-557b0847e966-kube-api-access-fncrb\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.756074 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-internal-tls-certs\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.856826 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-public-tls-certs\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.856924 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-config-data\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.857027 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/efa8eaec-19fb-43da-a1f3-557b0847e966-logs\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.857141 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.857215 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fncrb\" (UniqueName: \"kubernetes.io/projected/efa8eaec-19fb-43da-a1f3-557b0847e966-kube-api-access-fncrb\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.857359 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-internal-tls-certs\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.858384 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/efa8eaec-19fb-43da-a1f3-557b0847e966-logs\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.861617 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-public-tls-certs\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.862112 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.862810 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-config-data\") 
pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.868682 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-internal-tls-certs\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:02 crc kubenswrapper[4765]: I1210 07:12:02.879131 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fncrb\" (UniqueName: \"kubernetes.io/projected/efa8eaec-19fb-43da-a1f3-557b0847e966-kube-api-access-fncrb\") pod \"nova-api-0\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " pod="openstack/nova-api-0" Dec 10 07:12:03 crc kubenswrapper[4765]: I1210 07:12:03.003526 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:12:03 crc kubenswrapper[4765]: I1210 07:12:03.243977 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc","Type":"ContainerStarted","Data":"49d173bcd82030e3728295fd26cf9ba46fd74b265743bf325402dc0c5ce12e34"} Dec 10 07:12:03 crc kubenswrapper[4765]: I1210 07:12:03.244027 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc","Type":"ContainerStarted","Data":"f707edae2a556f90532dcf66301a00a51c4aade0f025e60306c8ac207c349e6b"} Dec 10 07:12:03 crc kubenswrapper[4765]: I1210 07:12:03.271811 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.271785236 podStartE2EDuration="2.271785236s" podCreationTimestamp="2025-12-10 07:12:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:12:03.262122841 +0000 UTC m=+1442.988788167" watchObservedRunningTime="2025-12-10 07:12:03.271785236 +0000 UTC m=+1442.998450552" Dec 10 07:12:03 crc kubenswrapper[4765]: I1210 07:12:03.448746 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:12:03 crc kubenswrapper[4765]: W1210 07:12:03.453350 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podefa8eaec_19fb_43da_a1f3_557b0847e966.slice/crio-e622eed2da2518c15d0f34c6464c0c6813f78d3244a0c9d6859617bb6b5c9a8a WatchSource:0}: Error finding container e622eed2da2518c15d0f34c6464c0c6813f78d3244a0c9d6859617bb6b5c9a8a: Status 404 returned error can't find the container with id e622eed2da2518c15d0f34c6464c0c6813f78d3244a0c9d6859617bb6b5c9a8a Dec 10 07:12:04 crc kubenswrapper[4765]: I1210 07:12:04.256093 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"efa8eaec-19fb-43da-a1f3-557b0847e966","Type":"ContainerStarted","Data":"73d0e7ff7a431b33ef59f1209e48d0aa7cb254124be09d9a85a9829d0d6c40a9"} Dec 10 07:12:04 crc kubenswrapper[4765]: I1210 07:12:04.256471 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"efa8eaec-19fb-43da-a1f3-557b0847e966","Type":"ContainerStarted","Data":"735741e28bbecc21170b1c3726ecc32ba7fee3884af169977fd59b27fb29dbe6"} Dec 10 07:12:04 crc kubenswrapper[4765]: I1210 07:12:04.256490 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-0" event={"ID":"efa8eaec-19fb-43da-a1f3-557b0847e966","Type":"ContainerStarted","Data":"e622eed2da2518c15d0f34c6464c0c6813f78d3244a0c9d6859617bb6b5c9a8a"} Dec 10 07:12:04 crc kubenswrapper[4765]: I1210 07:12:04.279433 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.279407225 podStartE2EDuration="2.279407225s" podCreationTimestamp="2025-12-10 07:12:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:12:04.274866586 +0000 UTC m=+1444.001531902" watchObservedRunningTime="2025-12-10 07:12:04.279407225 +0000 UTC m=+1444.006072541" Dec 10 07:12:04 crc kubenswrapper[4765]: I1210 07:12:04.601012 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd082226-18a8-469e-b28a-7094701112a3" path="/var/lib/kubelet/pods/dd082226-18a8-469e-b28a-7094701112a3/volumes" Dec 10 07:12:04 crc kubenswrapper[4765]: I1210 07:12:04.642328 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 10 07:12:06 crc kubenswrapper[4765]: I1210 07:12:06.605877 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 07:12:06 crc kubenswrapper[4765]: I1210 07:12:06.605917 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 07:12:09 crc kubenswrapper[4765]: I1210 07:12:09.642308 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 10 07:12:09 crc kubenswrapper[4765]: I1210 07:12:09.673940 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 10 07:12:10 crc kubenswrapper[4765]: I1210 07:12:10.345690 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 10 07:12:11 crc kubenswrapper[4765]: I1210 07:12:11.591806 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 07:12:11 crc kubenswrapper[4765]: I1210 07:12:11.592111 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 07:12:12 crc kubenswrapper[4765]: I1210 07:12:12.606228 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 07:12:12 crc kubenswrapper[4765]: I1210 07:12:12.606204 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 07:12:13 crc kubenswrapper[4765]: I1210 07:12:13.004860 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 07:12:13 crc kubenswrapper[4765]: I1210 07:12:13.006422 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 07:12:14 crc kubenswrapper[4765]: I1210 07:12:14.018270 4765 prober.go:107] "Probe failed" probeType="Startup" 
pod="openstack/nova-api-0" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 07:12:14 crc kubenswrapper[4765]: I1210 07:12:14.018270 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 07:12:17 crc kubenswrapper[4765]: I1210 07:12:17.441478 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 10 07:12:21 crc kubenswrapper[4765]: I1210 07:12:21.597179 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 10 07:12:21 crc kubenswrapper[4765]: I1210 07:12:21.597829 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 10 07:12:21 crc kubenswrapper[4765]: I1210 07:12:21.603163 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 10 07:12:21 crc kubenswrapper[4765]: I1210 07:12:21.603227 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 10 07:12:23 crc kubenswrapper[4765]: I1210 07:12:23.011046 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 07:12:23 crc kubenswrapper[4765]: I1210 07:12:23.011759 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 07:12:23 crc kubenswrapper[4765]: I1210 07:12:23.012130 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 07:12:23 crc kubenswrapper[4765]: I1210 07:12:23.012197 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 07:12:23 crc kubenswrapper[4765]: I1210 07:12:23.018569 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 07:12:23 crc kubenswrapper[4765]: I1210 07:12:23.019774 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 07:12:42 crc kubenswrapper[4765]: I1210 07:12:42.941014 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 10 07:12:42 crc kubenswrapper[4765]: I1210 07:12:42.941809 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="3d46af4c-da3b-47cb-a069-cb978f0df610" containerName="openstackclient" containerID="cri-o://8faab6e5a335c25d8b666620bbbd06c7e6cd700db4ab561bb39a1a3cf463d922" gracePeriod=2 Dec 10 07:12:42 crc kubenswrapper[4765]: I1210 07:12:42.980462 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.205730 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.206245 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" containerName="openstack-network-exporter" 
containerID="cri-o://bc68bc51e690d35bd38c6ac72178fa005e623ece23b2db62f390bf0ff9151544" gracePeriod=300 Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.335538 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 07:12:43 crc kubenswrapper[4765]: E1210 07:12:43.468810 4765 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 10 07:12:43 crc kubenswrapper[4765]: E1210 07:12:43.469123 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data podName:78b416b3-3796-4fa3-8a4f-7fa6107d98a1 nodeName:}" failed. No retries permitted until 2025-12-10 07:12:43.969078918 +0000 UTC m=+1483.695744234 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data") pod "rabbitmq-server-0" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1") : configmap "rabbitmq-config-data" not found Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.533053 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placementbd79-account-delete-fm679"] Dec 10 07:12:43 crc kubenswrapper[4765]: E1210 07:12:43.536467 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d46af4c-da3b-47cb-a069-cb978f0df610" containerName="openstackclient" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.536490 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d46af4c-da3b-47cb-a069-cb978f0df610" containerName="openstackclient" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.536697 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d46af4c-da3b-47cb-a069-cb978f0df610" containerName="openstackclient" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.537410 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placementbd79-account-delete-fm679" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.605321 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placementbd79-account-delete-fm679"] Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.618201 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron3778-account-delete-5p76n"] Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.620349 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron3778-account-delete-5p76n" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.682162 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-wlh48"] Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.683746 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mgq5\" (UniqueName: \"kubernetes.io/projected/c417c995-d247-48fe-afea-472a698e27f4-kube-api-access-7mgq5\") pod \"placementbd79-account-delete-fm679\" (UID: \"c417c995-d247-48fe-afea-472a698e27f4\") " pod="openstack/placementbd79-account-delete-fm679" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.683856 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c417c995-d247-48fe-afea-472a698e27f4-operator-scripts\") pod \"placementbd79-account-delete-fm679\" (UID: \"c417c995-d247-48fe-afea-472a698e27f4\") " pod="openstack/placementbd79-account-delete-fm679" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.703963 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-v6h5d"] Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.740875 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-hhk4h"] Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.741184 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-hhk4h" podUID="416ec9f7-82f7-4eb1-a936-51038c6da878" containerName="openstack-network-exporter" containerID="cri-o://c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5" gracePeriod=30 Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.790712 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mgq5\" (UniqueName: \"kubernetes.io/projected/c417c995-d247-48fe-afea-472a698e27f4-kube-api-access-7mgq5\") pod \"placementbd79-account-delete-fm679\" (UID: \"c417c995-d247-48fe-afea-472a698e27f4\") " pod="openstack/placementbd79-account-delete-fm679" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.790808 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c417c995-d247-48fe-afea-472a698e27f4-operator-scripts\") pod \"placementbd79-account-delete-fm679\" (UID: \"c417c995-d247-48fe-afea-472a698e27f4\") " pod="openstack/placementbd79-account-delete-fm679" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.790867 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-operator-scripts\") pod \"neutron3778-account-delete-5p76n\" (UID: \"1bbedff0-5b89-4bbb-b308-6ccb13c8216c\") " pod="openstack/neutron3778-account-delete-5p76n" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.790982 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cqdd\" (UniqueName: \"kubernetes.io/projected/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-kube-api-access-6cqdd\") pod \"neutron3778-account-delete-5p76n\" (UID: \"1bbedff0-5b89-4bbb-b308-6ccb13c8216c\") " pod="openstack/neutron3778-account-delete-5p76n" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.798391 4765 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c417c995-d247-48fe-afea-472a698e27f4-operator-scripts\") pod \"placementbd79-account-delete-fm679\" (UID: \"c417c995-d247-48fe-afea-472a698e27f4\") " pod="openstack/placementbd79-account-delete-fm679" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.840709 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mgq5\" (UniqueName: \"kubernetes.io/projected/c417c995-d247-48fe-afea-472a698e27f4-kube-api-access-7mgq5\") pod \"placementbd79-account-delete-fm679\" (UID: \"c417c995-d247-48fe-afea-472a698e27f4\") " pod="openstack/placementbd79-account-delete-fm679" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.906647 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placementbd79-account-delete-fm679" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.954386 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-operator-scripts\") pod \"neutron3778-account-delete-5p76n\" (UID: \"1bbedff0-5b89-4bbb-b308-6ccb13c8216c\") " pod="openstack/neutron3778-account-delete-5p76n" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.954563 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cqdd\" (UniqueName: \"kubernetes.io/projected/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-kube-api-access-6cqdd\") pod \"neutron3778-account-delete-5p76n\" (UID: \"1bbedff0-5b89-4bbb-b308-6ccb13c8216c\") " pod="openstack/neutron3778-account-delete-5p76n" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.955350 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-operator-scripts\") pod \"neutron3778-account-delete-5p76n\" (UID: \"1bbedff0-5b89-4bbb-b308-6ccb13c8216c\") " pod="openstack/neutron3778-account-delete-5p76n" Dec 10 07:12:43 crc kubenswrapper[4765]: I1210 07:12:43.969318 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-hxr5k"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:43.995693 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" containerName="ovsdbserver-nb" containerID="cri-o://434c9efa9a14afdab28272a1b95c4a867523ae3d5290dad154a712563877f571" gracePeriod=300 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.018528 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-wlh48"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.025727 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cqdd\" (UniqueName: \"kubernetes.io/projected/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-kube-api-access-6cqdd\") pod \"neutron3778-account-delete-5p76n\" (UID: \"1bbedff0-5b89-4bbb-b308-6ccb13c8216c\") " pod="openstack/neutron3778-account-delete-5p76n" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.038397 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron3778-account-delete-5p76n"] Dec 10 07:12:44 crc kubenswrapper[4765]: E1210 07:12:44.059508 4765 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 10 07:12:44 crc 
kubenswrapper[4765]: E1210 07:12:44.059583 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data podName:78b416b3-3796-4fa3-8a4f-7fa6107d98a1 nodeName:}" failed. No retries permitted until 2025-12-10 07:12:45.059562013 +0000 UTC m=+1484.786227329 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data") pod "rabbitmq-server-0" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1") : configmap "rabbitmq-config-data" not found Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.100510 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.100859 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="4b62b966-7b0a-4099-977c-44682f703187" containerName="ovn-northd" containerID="cri-o://07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.101038 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="4b62b966-7b0a-4099-977c-44682f703187" containerName="openstack-network-exporter" containerID="cri-o://85b3d34a3b05018017b81c0696d2b13e8bc79227f19e89ee345bdd1bfbe284c6" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.154576 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56c6c8bc97-9hn2v"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.154839 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" podUID="ea4a2d0b-62e8-4527-948f-9f9c76070af1" containerName="dnsmasq-dns" containerID="cri-o://18dc864cc764071e1ec5b63dc28acdb5ea1bab6885de1a4d32d341104a46d4bc" gracePeriod=10 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.184341 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.238606 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican43b0-account-delete-tm6mc"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.240258 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican43b0-account-delete-tm6mc" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.255145 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-tjvqv"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.264774 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzdfj\" (UniqueName: \"kubernetes.io/projected/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-kube-api-access-kzdfj\") pod \"barbican43b0-account-delete-tm6mc\" (UID: \"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4\") " pod="openstack/barbican43b0-account-delete-tm6mc" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.264860 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-operator-scripts\") pod \"barbican43b0-account-delete-tm6mc\" (UID: \"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4\") " pod="openstack/barbican43b0-account-delete-tm6mc" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.271343 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron3778-account-delete-5p76n" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.291882 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-tjvqv"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.349564 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican43b0-account-delete-tm6mc"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.390193 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzdfj\" (UniqueName: \"kubernetes.io/projected/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-kube-api-access-kzdfj\") pod \"barbican43b0-account-delete-tm6mc\" (UID: \"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4\") " pod="openstack/barbican43b0-account-delete-tm6mc" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.390782 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-operator-scripts\") pod \"barbican43b0-account-delete-tm6mc\" (UID: \"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4\") " pod="openstack/barbican43b0-account-delete-tm6mc" Dec 10 07:12:44 crc kubenswrapper[4765]: E1210 07:12:44.393011 4765 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 10 07:12:44 crc kubenswrapper[4765]: E1210 07:12:44.393075 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data podName:7d035962-836c-48cf-8ea4-a3e5a23f58f9 nodeName:}" failed. No retries permitted until 2025-12-10 07:12:44.893054779 +0000 UTC m=+1484.619720175 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data") pod "rabbitmq-cell1-server-0" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9") : configmap "rabbitmq-cell1-config-data" not found Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.398339 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-operator-scripts\") pod \"barbican43b0-account-delete-tm6mc\" (UID: \"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4\") " pod="openstack/barbican43b0-account-delete-tm6mc" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.415823 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder1abb-account-delete-h8mp2"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.437884 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder1abb-account-delete-h8mp2" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.492796 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzdfj\" (UniqueName: \"kubernetes.io/projected/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-kube-api-access-kzdfj\") pod \"barbican43b0-account-delete-tm6mc\" (UID: \"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4\") " pod="openstack/barbican43b0-account-delete-tm6mc" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.498881 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder1abb-account-delete-h8mp2"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.539308 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-9mxvj"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.574166 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.577701 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-server" containerID="cri-o://4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578107 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="swift-recon-cron" containerID="cri-o://39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578139 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-auditor" containerID="cri-o://cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578144 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-updater" containerID="cri-o://47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578155 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-replicator" 
containerID="cri-o://f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578168 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-expirer" containerID="cri-o://349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578172 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-server" containerID="cri-o://7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578190 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-replicator" containerID="cri-o://4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578175 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="rsync" containerID="cri-o://b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578204 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-auditor" containerID="cri-o://b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578212 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-reaper" containerID="cri-o://d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578187 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-auditor" containerID="cri-o://9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578220 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-updater" containerID="cri-o://8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578225 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-replicator" containerID="cri-o://ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.578257 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-server" 
containerID="cri-o://d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.594641 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-9mxvj"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.609195 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican43b0-account-delete-tm6mc" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.595879 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fec9b72-da40-4d40-9f56-5eca02badaba-operator-scripts\") pod \"cinder1abb-account-delete-h8mp2\" (UID: \"1fec9b72-da40-4d40-9f56-5eca02badaba\") " pod="openstack/cinder1abb-account-delete-h8mp2" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.609498 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpz57\" (UniqueName: \"kubernetes.io/projected/1fec9b72-da40-4d40-9f56-5eca02badaba-kube-api-access-hpz57\") pod \"cinder1abb-account-delete-h8mp2\" (UID: \"1fec9b72-da40-4d40-9f56-5eca02badaba\") " pod="openstack/cinder1abb-account-delete-h8mp2" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.713975 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fec9b72-da40-4d40-9f56-5eca02badaba-operator-scripts\") pod \"cinder1abb-account-delete-h8mp2\" (UID: \"1fec9b72-da40-4d40-9f56-5eca02badaba\") " pod="openstack/cinder1abb-account-delete-h8mp2" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.714339 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpz57\" (UniqueName: \"kubernetes.io/projected/1fec9b72-da40-4d40-9f56-5eca02badaba-kube-api-access-hpz57\") pod \"cinder1abb-account-delete-h8mp2\" (UID: \"1fec9b72-da40-4d40-9f56-5eca02badaba\") " pod="openstack/cinder1abb-account-delete-h8mp2" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.715519 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fec9b72-da40-4d40-9f56-5eca02badaba-operator-scripts\") pod \"cinder1abb-account-delete-h8mp2\" (UID: \"1fec9b72-da40-4d40-9f56-5eca02badaba\") " pod="openstack/cinder1abb-account-delete-h8mp2" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.796647 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpz57\" (UniqueName: \"kubernetes.io/projected/1fec9b72-da40-4d40-9f56-5eca02badaba-kube-api-access-hpz57\") pod \"cinder1abb-account-delete-h8mp2\" (UID: \"1fec9b72-da40-4d40-9f56-5eca02badaba\") " pod="openstack/cinder1abb-account-delete-h8mp2" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.855279 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="182a289b-2c34-48a5-975d-94eccdf449fe" path="/var/lib/kubelet/pods/182a289b-2c34-48a5-975d-94eccdf449fe/volumes" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.856671 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b145a008-b02d-4c5b-b426-5da68b2d2d09" path="/var/lib/kubelet/pods/b145a008-b02d-4c5b-b426-5da68b2d2d09/volumes" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.857624 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="ee0d3c38-da7c-46ad-ad72-5870e7b61db0" path="/var/lib/kubelet/pods/ee0d3c38-da7c-46ad-ad72-5870e7b61db0/volumes" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.858536 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-bxqxj"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.858705 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-2tnf4"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.858787 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-2tnf4"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.859044 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-bxqxj"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.859623 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.859708 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-c976d8757-xmthg"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.859779 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-75b8c6446d-lf487"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.860285 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-75b8c6446d-lf487" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerName="placement-log" containerID="cri-o://0ec9efaecee0820558e3dfac5cce87e629fece52ce9474ad4a6e8484c9d7a6fb" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.861435 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-75b8c6446d-lf487" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerName="placement-api" containerID="cri-o://782ba5c041f2d0683f31e70ec7c7cd0a1637f3847ec96fe8a10620030fff50a0" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.861839 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-c976d8757-xmthg" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerName="proxy-httpd" containerID="cri-o://e8fbadda4c283736804707e4d6088058b572b8678b484fe3d73e76e090942c5b" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.861895 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" containerName="openstack-network-exporter" containerID="cri-o://4ea50907947c896cad4543aac319e9ed34423034ee1e36f452043b39462ea022" gracePeriod=300 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.861932 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-c976d8757-xmthg" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerName="proxy-server" containerID="cri-o://4fc9345636a9c7222a93a22391487b0745bc3710f4331dd26db99b3758d3c34b" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.863969 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-2sjp2"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.867284 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_fb882c24-ec9a-4e19-99ac-b6f96c420cb5/ovsdbserver-nb/0.log" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.867414 4765 generic.go:334] "Generic (PLEG): container finished" 
podID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" containerID="bc68bc51e690d35bd38c6ac72178fa005e623ece23b2db62f390bf0ff9151544" exitCode=2 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.867503 4765 generic.go:334] "Generic (PLEG): container finished" podID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" containerID="434c9efa9a14afdab28272a1b95c4a867523ae3d5290dad154a712563877f571" exitCode=143 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.867752 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"fb882c24-ec9a-4e19-99ac-b6f96c420cb5","Type":"ContainerDied","Data":"bc68bc51e690d35bd38c6ac72178fa005e623ece23b2db62f390bf0ff9151544"} Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.873315 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"fb882c24-ec9a-4e19-99ac-b6f96c420cb5","Type":"ContainerDied","Data":"434c9efa9a14afdab28272a1b95c4a867523ae3d5290dad154a712563877f571"} Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.878592 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glanceb1c8-account-delete-fghcc"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.880338 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glanceb1c8-account-delete-fghcc" Dec 10 07:12:44 crc kubenswrapper[4765]: E1210 07:12:44.887911 4765 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-hxr5k" message=< Dec 10 07:12:44 crc kubenswrapper[4765]: Exiting ovn-controller (1) [ OK ] Dec 10 07:12:44 crc kubenswrapper[4765]: > Dec 10 07:12:44 crc kubenswrapper[4765]: E1210 07:12:44.887960 4765 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" containerID="cri-o://eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694" Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.888000 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" containerID="cri-o://eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694" gracePeriod=30 Dec 10 07:12:44 crc kubenswrapper[4765]: E1210 07:12:44.928390 4765 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 10 07:12:44 crc kubenswrapper[4765]: E1210 07:12:44.928488 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data podName:7d035962-836c-48cf-8ea4-a3e5a23f58f9 nodeName:}" failed. No retries permitted until 2025-12-10 07:12:45.928463398 +0000 UTC m=+1485.655128714 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data") pod "rabbitmq-cell1-server-0" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9") : configmap "rabbitmq-cell1-config-data" not found Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.933595 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-m9wx8"] Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.939589 4765 generic.go:334] "Generic (PLEG): container finished" podID="4b62b966-7b0a-4099-977c-44682f703187" containerID="85b3d34a3b05018017b81c0696d2b13e8bc79227f19e89ee345bdd1bfbe284c6" exitCode=2 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.939679 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4b62b966-7b0a-4099-977c-44682f703187","Type":"ContainerDied","Data":"85b3d34a3b05018017b81c0696d2b13e8bc79227f19e89ee345bdd1bfbe284c6"} Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.949433 4765 generic.go:334] "Generic (PLEG): container finished" podID="ea4a2d0b-62e8-4527-948f-9f9c76070af1" containerID="18dc864cc764071e1ec5b63dc28acdb5ea1bab6885de1a4d32d341104a46d4bc" exitCode=0 Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.949477 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" event={"ID":"ea4a2d0b-62e8-4527-948f-9f9c76070af1","Type":"ContainerDied","Data":"18dc864cc764071e1ec5b63dc28acdb5ea1bab6885de1a4d32d341104a46d4bc"} Dec 10 07:12:44 crc kubenswrapper[4765]: I1210 07:12:44.956501 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-2sjp2"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.033278 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-m9wx8"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.038456 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcrqn\" (UniqueName: \"kubernetes.io/projected/3dbd0d4a-660a-4887-83ae-25c00f54196a-kube-api-access-lcrqn\") pod \"glanceb1c8-account-delete-fghcc\" (UID: \"3dbd0d4a-660a-4887-83ae-25c00f54196a\") " pod="openstack/glanceb1c8-account-delete-fghcc" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.038582 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dbd0d4a-660a-4887-83ae-25c00f54196a-operator-scripts\") pod \"glanceb1c8-account-delete-fghcc\" (UID: \"3dbd0d4a-660a-4887-83ae-25c00f54196a\") " pod="openstack/glanceb1c8-account-delete-fghcc" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.050736 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" containerName="ovsdbserver-sb" containerID="cri-o://8a962de55c82b0ec8429a3fe9ee218997a5f0a7b38bc54e87077ef10ffdd1b1e" gracePeriod=300 Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.064603 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-c976d8757-xmthg" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.168:8080/healthcheck\": dial tcp 10.217.0.168:8080: connect: connection refused" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.065283 4765 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openstack/swift-proxy-c976d8757-xmthg" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.0.168:8080/healthcheck\": dial tcp 10.217.0.168:8080: connect: connection refused" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.095233 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder1abb-account-delete-h8mp2" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.142177 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcrqn\" (UniqueName: \"kubernetes.io/projected/3dbd0d4a-660a-4887-83ae-25c00f54196a-kube-api-access-lcrqn\") pod \"glanceb1c8-account-delete-fghcc\" (UID: \"3dbd0d4a-660a-4887-83ae-25c00f54196a\") " pod="openstack/glanceb1c8-account-delete-fghcc" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.142252 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dbd0d4a-660a-4887-83ae-25c00f54196a-operator-scripts\") pod \"glanceb1c8-account-delete-fghcc\" (UID: \"3dbd0d4a-660a-4887-83ae-25c00f54196a\") " pod="openstack/glanceb1c8-account-delete-fghcc" Dec 10 07:12:45 crc kubenswrapper[4765]: E1210 07:12:45.142484 4765 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 10 07:12:45 crc kubenswrapper[4765]: E1210 07:12:45.142547 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data podName:78b416b3-3796-4fa3-8a4f-7fa6107d98a1 nodeName:}" failed. No retries permitted until 2025-12-10 07:12:47.142525427 +0000 UTC m=+1486.869190753 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data") pod "rabbitmq-server-0" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1") : configmap "rabbitmq-config-data" not found Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.144264 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dbd0d4a-660a-4887-83ae-25c00f54196a-operator-scripts\") pod \"glanceb1c8-account-delete-fghcc\" (UID: \"3dbd0d4a-660a-4887-83ae-25c00f54196a\") " pod="openstack/glanceb1c8-account-delete-fghcc" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.186126 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-q9kp5"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.208124 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcrqn\" (UniqueName: \"kubernetes.io/projected/3dbd0d4a-660a-4887-83ae-25c00f54196a-kube-api-access-lcrqn\") pod \"glanceb1c8-account-delete-fghcc\" (UID: \"3dbd0d4a-660a-4887-83ae-25c00f54196a\") " pod="openstack/glanceb1c8-account-delete-fghcc" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.247937 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell092ce-account-delete-r87ss"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.251170 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell092ce-account-delete-r87ss" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.284972 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glanceb1c8-account-delete-fghcc"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.407583 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8trm\" (UniqueName: \"kubernetes.io/projected/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-kube-api-access-f8trm\") pod \"novacell092ce-account-delete-r87ss\" (UID: \"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc\") " pod="openstack/novacell092ce-account-delete-r87ss" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.408138 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-operator-scripts\") pod \"novacell092ce-account-delete-r87ss\" (UID: \"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc\") " pod="openstack/novacell092ce-account-delete-r87ss" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.447505 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glanceb1c8-account-delete-fghcc" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.510920 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-operator-scripts\") pod \"novacell092ce-account-delete-r87ss\" (UID: \"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc\") " pod="openstack/novacell092ce-account-delete-r87ss" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.511121 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8trm\" (UniqueName: \"kubernetes.io/projected/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-kube-api-access-f8trm\") pod \"novacell092ce-account-delete-r87ss\" (UID: \"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc\") " pod="openstack/novacell092ce-account-delete-r87ss" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.518933 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-operator-scripts\") pod \"novacell092ce-account-delete-r87ss\" (UID: \"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc\") " pod="openstack/novacell092ce-account-delete-r87ss" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.536584 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-654b8cdb7c-84l5p"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.536919 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-654b8cdb7c-84l5p" podUID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerName="neutron-api" containerID="cri-o://1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4" gracePeriod=30 Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.537455 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-654b8cdb7c-84l5p" podUID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerName="neutron-httpd" containerID="cri-o://2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24" gracePeriod=30 Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.547248 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8trm\" (UniqueName: 
\"kubernetes.io/projected/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-kube-api-access-f8trm\") pod \"novacell092ce-account-delete-r87ss\" (UID: \"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc\") " pod="openstack/novacell092ce-account-delete-r87ss" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.564751 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovs-vswitchd" containerID="cri-o://d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" gracePeriod=29 Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.584974 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell092ce-account-delete-r87ss" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.695161 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell092ce-account-delete-r87ss"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.748511 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-q9kp5"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.827992 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.828333 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="202a617e-eb55-4702-8958-3502b6d8e91b" containerName="cinder-api-log" containerID="cri-o://280f956e598734a0c953052f0bef830e41258f83b3ea961721502f3e42f78557" gracePeriod=30 Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.828826 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="202a617e-eb55-4702-8958-3502b6d8e91b" containerName="cinder-api" containerID="cri-o://975b9afa0ea86dc2438e36b28f792b56c554b75ab840658b6921015304cc0b22" gracePeriod=30 Dec 10 07:12:45 crc kubenswrapper[4765]: E1210 07:12:45.846151 4765 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Dec 10 07:12:45 crc kubenswrapper[4765]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 10 07:12:45 crc kubenswrapper[4765]: + source /usr/local/bin/container-scripts/functions Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNBridge=br-int Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNRemote=tcp:localhost:6642 Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNEncapType=geneve Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNAvailabilityZones= Dec 10 07:12:45 crc kubenswrapper[4765]: ++ EnableChassisAsGateway=true Dec 10 07:12:45 crc kubenswrapper[4765]: ++ PhysicalNetworks= Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNHostName= Dec 10 07:12:45 crc kubenswrapper[4765]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 10 07:12:45 crc kubenswrapper[4765]: ++ ovs_dir=/var/lib/openvswitch Dec 10 07:12:45 crc kubenswrapper[4765]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 10 07:12:45 crc kubenswrapper[4765]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 10 07:12:45 crc kubenswrapper[4765]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + sleep 0.5 Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + sleep 0.5 Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + sleep 0.5 Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + cleanup_ovsdb_server_semaphore Dec 10 07:12:45 crc kubenswrapper[4765]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 10 07:12:45 crc kubenswrapper[4765]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 10 07:12:45 crc kubenswrapper[4765]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-v6h5d" message=< Dec 10 07:12:45 crc kubenswrapper[4765]: Exiting ovsdb-server (5) [ OK ] Dec 10 07:12:45 crc kubenswrapper[4765]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 10 07:12:45 crc kubenswrapper[4765]: + source /usr/local/bin/container-scripts/functions Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNBridge=br-int Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNRemote=tcp:localhost:6642 Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNEncapType=geneve Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNAvailabilityZones= Dec 10 07:12:45 crc kubenswrapper[4765]: ++ EnableChassisAsGateway=true Dec 10 07:12:45 crc kubenswrapper[4765]: ++ PhysicalNetworks= Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNHostName= Dec 10 07:12:45 crc kubenswrapper[4765]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 10 07:12:45 crc kubenswrapper[4765]: ++ ovs_dir=/var/lib/openvswitch Dec 10 07:12:45 crc kubenswrapper[4765]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 10 07:12:45 crc kubenswrapper[4765]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 10 07:12:45 crc kubenswrapper[4765]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + sleep 0.5 Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + sleep 0.5 Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + sleep 0.5 Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + cleanup_ovsdb_server_semaphore Dec 10 07:12:45 crc kubenswrapper[4765]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 10 07:12:45 crc kubenswrapper[4765]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 10 07:12:45 crc kubenswrapper[4765]: > Dec 10 07:12:45 crc kubenswrapper[4765]: E1210 07:12:45.849409 4765 kuberuntime_container.go:691] "PreStop hook failed" err=< Dec 10 07:12:45 crc kubenswrapper[4765]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 10 07:12:45 crc kubenswrapper[4765]: + source /usr/local/bin/container-scripts/functions Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNBridge=br-int Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNRemote=tcp:localhost:6642 Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNEncapType=geneve Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNAvailabilityZones= Dec 10 07:12:45 crc kubenswrapper[4765]: ++ EnableChassisAsGateway=true Dec 10 07:12:45 crc kubenswrapper[4765]: ++ PhysicalNetworks= Dec 10 07:12:45 crc kubenswrapper[4765]: ++ OVNHostName= Dec 10 07:12:45 crc kubenswrapper[4765]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 10 07:12:45 crc kubenswrapper[4765]: ++ ovs_dir=/var/lib/openvswitch Dec 10 07:12:45 crc kubenswrapper[4765]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 10 07:12:45 crc kubenswrapper[4765]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 10 07:12:45 crc kubenswrapper[4765]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + sleep 0.5 Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + sleep 0.5 Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + sleep 0.5 Dec 10 07:12:45 crc kubenswrapper[4765]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 10 07:12:45 crc kubenswrapper[4765]: + cleanup_ovsdb_server_semaphore Dec 10 07:12:45 crc kubenswrapper[4765]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 10 07:12:45 crc kubenswrapper[4765]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 10 07:12:45 crc kubenswrapper[4765]: > pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" containerID="cri-o://1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.849472 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" containerID="cri-o://1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" gracePeriod=28 Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.884042 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapi1a4f-account-delete-d276m"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.886217 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi1a4f-account-delete-d276m" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.899174 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.912594 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.913141 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" containerName="cinder-scheduler" containerID="cri-o://cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8" gracePeriod=30 Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.913687 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" containerName="probe" containerID="cri-o://ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5" gracePeriod=30 Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.932236 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi1a4f-account-delete-d276m"] Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.959923 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f429f6df-3510-4c6a-b9e8-062895218832-operator-scripts\") pod \"novaapi1a4f-account-delete-d276m\" (UID: \"f429f6df-3510-4c6a-b9e8-062895218832\") " pod="openstack/novaapi1a4f-account-delete-d276m" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.960214 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnpt5\" (UniqueName: \"kubernetes.io/projected/f429f6df-3510-4c6a-b9e8-062895218832-kube-api-access-bnpt5\") pod \"novaapi1a4f-account-delete-d276m\" (UID: \"f429f6df-3510-4c6a-b9e8-062895218832\") " pod="openstack/novaapi1a4f-account-delete-d276m" Dec 10 07:12:45 crc kubenswrapper[4765]: E1210 07:12:45.960602 4765 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 10 07:12:45 crc kubenswrapper[4765]: E1210 07:12:45.960672 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data podName:7d035962-836c-48cf-8ea4-a3e5a23f58f9 nodeName:}" failed. No retries permitted until 2025-12-10 07:12:47.960647447 +0000 UTC m=+1487.687312773 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data") pod "rabbitmq-cell1-server-0" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9") : configmap "rabbitmq-cell1-config-data" not found Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.965455 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-hhk4h_416ec9f7-82f7-4eb1-a936-51038c6da878/openstack-network-exporter/0.log" Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.965540 4765 util.go:48] "No ready sandbox for pod can be found. 
Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.965540 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-hhk4h"
Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.968349 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_fb882c24-ec9a-4e19-99ac-b6f96c420cb5/ovsdbserver-nb/0.log"
Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.968432 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.976856 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.977214 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-log" containerID="cri-o://f707edae2a556f90532dcf66301a00a51c4aade0f025e60306c8ac207c349e6b" gracePeriod=30
Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.977391 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-metadata" containerID="cri-o://49d173bcd82030e3728295fd26cf9ba46fd74b265743bf325402dc0c5ce12e34" gracePeriod=30
Dec 10 07:12:45 crc kubenswrapper[4765]: I1210 07:12:45.988848 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" containerName="rabbitmq" containerID="cri-o://60f858d2d85b01e9da2a6a95ed2be831935e234ad3033222a4291f2052f6ce52" gracePeriod=604800
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.002673 4765 generic.go:334] "Generic (PLEG): container finished" podID="3d46af4c-da3b-47cb-a069-cb978f0df610" containerID="8faab6e5a335c25d8b666620bbbd06c7e6cd700db4ab561bb39a1a3cf463d922" exitCode=137
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.034432 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_fb882c24-ec9a-4e19-99ac-b6f96c420cb5/ovsdbserver-nb/0.log"
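[editor's note] Note the gracePeriod=604800 (7 days: 7 * 24 * 3600 = 604800 seconds) used for the rabbitmq container, versus the 30-second grace applied to most services here: the RabbitMQ pods evidently carry a very long terminationGracePeriodSeconds so the broker can drain cleanly. One way to confirm where such a value comes from (sketch, assuming kubectl access):

    kubectl -n openstack get pod rabbitmq-server-0 \
      -o jsonpath='{.spec.terminationGracePeriodSeconds}{"\n"}'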
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.039312 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.039954 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"fb882c24-ec9a-4e19-99ac-b6f96c420cb5","Type":"ContainerDied","Data":"38895890daf952bdf1d411aa5eebdb9892db798a207cb43a4f205adfa0027de0"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.045440 4765 scope.go:117] "RemoveContainer" containerID="bc68bc51e690d35bd38c6ac72178fa005e623ece23b2db62f390bf0ff9151544"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.052988 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.053780 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="48199101-c7d2-4881-98bd-53d14d7308d5" containerName="glance-log" containerID="cri-o://b362cb9831e6868cc5bfea4106470f04254c397b3cd98c304dac462e618c6408" gracePeriod=30
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.054557 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="48199101-c7d2-4881-98bd-53d14d7308d5" containerName="glance-httpd" containerID="cri-o://e51ededa81fc983915ee20952321442588bb91e3bed29e48234b289ecdd3cfdc" gracePeriod=30
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.063726 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.064122 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkrzg\" (UniqueName: \"kubernetes.io/projected/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-kube-api-access-wkrzg\") pod \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") "
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.064674 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sfxdr\" (UniqueName: \"kubernetes.io/projected/416ec9f7-82f7-4eb1-a936-51038c6da878-kube-api-access-sfxdr\") pod \"416ec9f7-82f7-4eb1-a936-51038c6da878\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") "
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.064789 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-combined-ca-bundle\") pod \"416ec9f7-82f7-4eb1-a936-51038c6da878\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") "
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.064870 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/416ec9f7-82f7-4eb1-a936-51038c6da878-config\") pod \"416ec9f7-82f7-4eb1-a936-51038c6da878\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") "
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.065026 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-scripts\") pod \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") "
\"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.067699 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovs-rundir\") pod \"416ec9f7-82f7-4eb1-a936-51038c6da878\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.067785 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-metrics-certs-tls-certs\") pod \"416ec9f7-82f7-4eb1-a936-51038c6da878\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.067896 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-combined-ca-bundle\") pod \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.068002 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-config\") pod \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.068118 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdbserver-nb-tls-certs\") pod \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.068202 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-metrics-certs-tls-certs\") pod \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.068371 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdb-rundir\") pod \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\" (UID: \"fb882c24-ec9a-4e19-99ac-b6f96c420cb5\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.068792 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovn-rundir\") pod \"416ec9f7-82f7-4eb1-a936-51038c6da878\" (UID: \"416ec9f7-82f7-4eb1-a936-51038c6da878\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.071883 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f429f6df-3510-4c6a-b9e8-062895218832-operator-scripts\") pod \"novaapi1a4f-account-delete-d276m\" (UID: \"f429f6df-3510-4c6a-b9e8-062895218832\") " pod="openstack/novaapi1a4f-account-delete-d276m" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.072186 4765 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-bnpt5\" (UniqueName: \"kubernetes.io/projected/f429f6df-3510-4c6a-b9e8-062895218832-kube-api-access-bnpt5\") pod \"novaapi1a4f-account-delete-d276m\" (UID: \"f429f6df-3510-4c6a-b9e8-062895218832\") " pod="openstack/novaapi1a4f-account-delete-d276m" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.081527 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f429f6df-3510-4c6a-b9e8-062895218832-operator-scripts\") pod \"novaapi1a4f-account-delete-d276m\" (UID: \"f429f6df-3510-4c6a-b9e8-062895218832\") " pod="openstack/novaapi1a4f-account-delete-d276m" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.066688 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-scripts" (OuterVolumeSpecName: "scripts") pod "fb882c24-ec9a-4e19-99ac-b6f96c420cb5" (UID: "fb882c24-ec9a-4e19-99ac-b6f96c420cb5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.072917 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/416ec9f7-82f7-4eb1-a936-51038c6da878-config" (OuterVolumeSpecName: "config") pod "416ec9f7-82f7-4eb1-a936-51038c6da878" (UID: "416ec9f7-82f7-4eb1-a936-51038c6da878"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.073029 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "416ec9f7-82f7-4eb1-a936-51038c6da878" (UID: "416ec9f7-82f7-4eb1-a936-51038c6da878"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.079201 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "416ec9f7-82f7-4eb1-a936-51038c6da878" (UID: "416ec9f7-82f7-4eb1-a936-51038c6da878"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.079569 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "fb882c24-ec9a-4e19-99ac-b6f96c420cb5" (UID: "fb882c24-ec9a-4e19-99ac-b6f96c420cb5"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.089534 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.092479 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-kube-api-access-wkrzg" (OuterVolumeSpecName: "kube-api-access-wkrzg") pod "fb882c24-ec9a-4e19-99ac-b6f96c420cb5" (UID: "fb882c24-ec9a-4e19-99ac-b6f96c420cb5"). InnerVolumeSpecName "kube-api-access-wkrzg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.095794 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-config" (OuterVolumeSpecName: "config") pod "fb882c24-ec9a-4e19-99ac-b6f96c420cb5" (UID: "fb882c24-ec9a-4e19-99ac-b6f96c420cb5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.095726 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b1099ee9-e4d7-496f-b35e-7617ee456898" containerName="glance-httpd" containerID="cri-o://90ca341c2978c3ac47bd6f8955762450564583c1ccb4813fdb2ffc303391ba52" gracePeriod=30 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.095406 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b1099ee9-e4d7-496f-b35e-7617ee456898" containerName="glance-log" containerID="cri-o://532e6ef7ce06ebf94ad58b81fb2331379d4d7d0f9d8d702b8370e40c41c43459" gracePeriod=30 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.103078 4765 generic.go:334] "Generic (PLEG): container finished" podID="209844a2-e0ac-447f-99f6-28cd864ca648" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" exitCode=0 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.103857 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-v6h5d" event={"ID":"209844a2-e0ac-447f-99f6-28cd864ca648","Type":"ContainerDied","Data":"1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab"} Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.109567 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-654b8cdb7c-84l5p" podUID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.152:9696/\": dial tcp 10.217.0.152:9696: connect: connection refused" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.141806 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-hxr5k_8b2c99d6-f2e1-4c1c-8825-e8c62d00d133/ovn-controller/0.log" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.141873 4765 generic.go:334] "Generic (PLEG): container finished" podID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerID="eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694" exitCode=0 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.141913 4765 util.go:48] "No ready sandbox for pod can be found. 
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.141913 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.142414 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k" event={"ID":"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133","Type":"ContainerDied","Data":"eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.146305 4765 generic.go:334] "Generic (PLEG): container finished" podID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerID="4fc9345636a9c7222a93a22391487b0745bc3710f4331dd26db99b3758d3c34b" exitCode=0
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.151441 4765 generic.go:334] "Generic (PLEG): container finished" podID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerID="e8fbadda4c283736804707e4d6088058b572b8678b484fe3d73e76e090942c5b" exitCode=0
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.147859 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "fb882c24-ec9a-4e19-99ac-b6f96c420cb5" (UID: "fb882c24-ec9a-4e19-99ac-b6f96c420cb5"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.147906 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/416ec9f7-82f7-4eb1-a936-51038c6da878-kube-api-access-sfxdr" (OuterVolumeSpecName: "kube-api-access-sfxdr") pod "416ec9f7-82f7-4eb1-a936-51038c6da878" (UID: "416ec9f7-82f7-4eb1-a936-51038c6da878"). InnerVolumeSpecName "kube-api-access-sfxdr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.146841 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-c976d8757-xmthg" event={"ID":"cfc6fea8-973e-42c9-9482-a4853abec6c1","Type":"ContainerDied","Data":"4fc9345636a9c7222a93a22391487b0745bc3710f4331dd26db99b3758d3c34b"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.151815 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-c976d8757-xmthg" event={"ID":"cfc6fea8-973e-42c9-9482-a4853abec6c1","Type":"ContainerDied","Data":"e8fbadda4c283736804707e4d6088058b572b8678b484fe3d73e76e090942c5b"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.150811 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnpt5\" (UniqueName: \"kubernetes.io/projected/f429f6df-3510-4c6a-b9e8-062895218832-kube-api-access-bnpt5\") pod \"novaapi1a4f-account-delete-d276m\" (UID: \"f429f6df-3510-4c6a-b9e8-062895218832\") " pod="openstack/novaapi1a4f-account-delete-d276m"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.162950 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-hhk4h_416ec9f7-82f7-4eb1-a936-51038c6da878/openstack-network-exporter/0.log"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.163011 4765 generic.go:334] "Generic (PLEG): container finished" podID="416ec9f7-82f7-4eb1-a936-51038c6da878" containerID="c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5" exitCode=2
event={"ID":"416ec9f7-82f7-4eb1-a936-51038c6da878","Type":"ContainerDied","Data":"c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5"} Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.163264 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-hhk4h" event={"ID":"416ec9f7-82f7-4eb1-a936-51038c6da878","Type":"ContainerDied","Data":"0501da2016dc7178ad83bb608e005806715667bdc0baa51a63d1196d7ff8a2dd"} Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.163355 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-hhk4h" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.177145 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-swift-storage-0\") pod \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.177282 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-config\") pod \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.177380 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-svc\") pod \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.177497 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4k98d\" (UniqueName: \"kubernetes.io/projected/ea4a2d0b-62e8-4527-948f-9f9c76070af1-kube-api-access-4k98d\") pod \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.177541 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-nb\") pod \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.177621 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-sb\") pod \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\" (UID: \"ea4a2d0b-62e8-4527-948f-9f9c76070af1\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.178291 4765 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovs-rundir\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.178313 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.178326 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 10 
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.178340 4765 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/416ec9f7-82f7-4eb1-a936-51038c6da878-ovn-rundir\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.178352 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkrzg\" (UniqueName: \"kubernetes.io/projected/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-kube-api-access-wkrzg\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.178366 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sfxdr\" (UniqueName: \"kubernetes.io/projected/416ec9f7-82f7-4eb1-a936-51038c6da878-kube-api-access-sfxdr\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.178378 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/416ec9f7-82f7-4eb1-a936-51038c6da878-config\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.178390 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.178417 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" "
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.180168 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb882c24-ec9a-4e19-99ac-b6f96c420cb5" (UID: "fb882c24-ec9a-4e19-99ac-b6f96c420cb5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.180360 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-66fcc55b75-8hcl6"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.180658 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-66fcc55b75-8hcl6" podUID="865e0a69-0d85-4d93-9d38-f52449d09d87" containerName="barbican-worker-log" containerID="cri-o://2ea6901c79ede6a161d2e30da9d3b8efb9ba7a80dd2d68ffbc3c35ec54f42907" gracePeriod=30
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.180811 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" event={"ID":"ea4a2d0b-62e8-4527-948f-9f9c76070af1","Type":"ContainerDied","Data":"641171b1c810999c2ce70181f39975175fe8d345e816cb8977906ee989a0fe3d"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.180850 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-66fcc55b75-8hcl6" podUID="865e0a69-0d85-4d93-9d38-f52449d09d87" containerName="barbican-worker" containerID="cri-o://cb7c75803c1530a9151f1c8f58e20ea86e4933c86c600aeb7834d5e89e66efe6" gracePeriod=30
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.180969 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.187189 4765 scope.go:117] "RemoveContainer" containerID="434c9efa9a14afdab28272a1b95c4a867523ae3d5290dad154a712563877f571"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.195061 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementbd79-account-delete-fm679" event={"ID":"c417c995-d247-48fe-afea-472a698e27f4","Type":"ContainerStarted","Data":"e6e1feb07b2ab1c0af54febb53cf41f2d76a8bc8d094a903a2bc867836f98134"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.202961 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.210355 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-7756b4f44b-9575x"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.210817 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" podUID="367a49cf-488a-4852-8728-78dacbfbd500" containerName="barbican-keystone-listener-log" containerID="cri-o://6904c7a18870c99e59c094d428f4fb9aa0a7611afe5301700842c547c5f6ff4a" gracePeriod=30
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.211826 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" podUID="367a49cf-488a-4852-8728-78dacbfbd500" containerName="barbican-keystone-listener" containerID="cri-o://db7c3938391cd43e268448d1b8ffd385e79c3741b66633044668f1d83c8facc4" gracePeriod=30
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.218515 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c0befa24-0eda-4f25-9f15-bfb0ebb74e1e/ovsdbserver-sb/0.log"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.218570 4765 generic.go:334] "Generic (PLEG): container finished" podID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" containerID="4ea50907947c896cad4543aac319e9ed34423034ee1e36f452043b39462ea022" exitCode=2
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.218596 4765 generic.go:334] "Generic (PLEG): container finished" podID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" containerID="8a962de55c82b0ec8429a3fe9ee218997a5f0a7b38bc54e87077ef10ffdd1b1e" exitCode=143
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.218665 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e","Type":"ContainerDied","Data":"4ea50907947c896cad4543aac319e9ed34423034ee1e36f452043b39462ea022"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.218700 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e","Type":"ContainerDied","Data":"8a962de55c82b0ec8429a3fe9ee218997a5f0a7b38bc54e87077ef10ffdd1b1e"}
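[editor's note] The exit codes in these PLEG "container finished" events follow the shell's 128+signal convention: exitCode=143 is 128 + 15 (SIGTERM, a clean response to the grace-period kill), exitCode=137 is 128 + 9 (SIGKILL, force-killed after the grace period), while exitCode=2 above is the process's own error status rather than a signal. A quick demonstration of the convention:

    sh -c 'kill -TERM $$'; echo $?   # prints 143
    sh -c 'kill -KILL $$'; echo $?   # prints 137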
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.239338 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.247499 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "416ec9f7-82f7-4eb1-a936-51038c6da878" (UID: "416ec9f7-82f7-4eb1-a936-51038c6da878"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.253390 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.261020 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerName="nova-api-log" containerID="cri-o://735741e28bbecc21170b1c3726ecc32ba7fee3884af169977fd59b27fb29dbe6" gracePeriod=30 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.261098 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerName="nova-api-api" containerID="cri-o://73d0e7ff7a431b33ef59f1209e48d0aa7cb254124be09d9a85a9829d0d6c40a9" gracePeriod=30 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.265795 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7df456d776-x4hrk"] Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.266304 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7df456d776-x4hrk" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerName="barbican-api" containerID="cri-o://2caeb9530028c3e6065c938e2b0efd49e4234ecf49bcff419b6a01776b982ffe" gracePeriod=30 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.266831 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7df456d776-x4hrk" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerName="barbican-api-log" containerID="cri-o://80d3af87b8b64fb1621fe28544a27dbb97a6487a3d0bf2b741bfbc88ebf45779" gracePeriod=30 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.281049 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config-secret\") pod \"3d46af4c-da3b-47cb-a069-cb978f0df610\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.281374 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-combined-ca-bundle\") pod \"3d46af4c-da3b-47cb-a069-cb978f0df610\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.281580 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config\") pod \"3d46af4c-da3b-47cb-a069-cb978f0df610\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") " Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.281641 4765 
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.281641 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7h2kf\" (UniqueName: \"kubernetes.io/projected/3d46af4c-da3b-47cb-a069-cb978f0df610-kube-api-access-7h2kf\") pod \"3d46af4c-da3b-47cb-a069-cb978f0df610\" (UID: \"3d46af4c-da3b-47cb-a069-cb978f0df610\") "
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.282359 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4k98d\" (UniqueName: \"kubernetes.io/projected/ea4a2d0b-62e8-4527-948f-9f9c76070af1-kube-api-access-4k98d\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.282375 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.282385 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.282393 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285325 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049" exitCode=0
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285357 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101" exitCode=0
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285366 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f" exitCode=0
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285396 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a" exitCode=0
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285402 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63" exitCode=0
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285409 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae" exitCode=0
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285416 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73" exitCode=0
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285423 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9" exitCode=0
podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e" exitCode=0 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285440 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f" exitCode=0 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285465 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b" exitCode=0 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285474 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d" exitCode=0 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285481 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52" exitCode=0 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285487 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0" exitCode=0 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285601 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049"} Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285655 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101"} Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285670 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f"} Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285681 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a"} Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285710 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63"} Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285719 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae"} Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285729 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73"} Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285737 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285747 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285755 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285785 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285796 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285805 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.285814 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0"}
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.308205 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.310718 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="f9af12be-6bc5-4aa8-bb84-135e3c0727cb" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://ac0fc8ce7b9fc642344add6c6901d9f6b0f18979e1bb126414b7d8b564fa757e" gracePeriod=30
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.317618 4765 generic.go:334] "Generic (PLEG): container finished" podID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerID="0ec9efaecee0820558e3dfac5cce87e629fece52ce9474ad4a6e8484c9d7a6fb" exitCode=143
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.317671 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75b8c6446d-lf487" event={"ID":"5cbf2f96-d196-413b-841a-9b753e6beae2","Type":"ContainerDied","Data":"0ec9efaecee0820558e3dfac5cce87e629fece52ce9474ad4a6e8484c9d7a6fb"}
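[editor's note] The burst of ContainerDied events for swift-storage-0 above is a single PLEG relist observing all of that pod's containers exiting inside the same grace window. The same view is available on the node directly from the CRI runtime (a sketch; crictl ships with CRI-O on this host, and io.kubernetes.pod.name is the label CRI-O puts on containers):

    crictl ps -a --state exited --label io.kubernetes.pod.name=swift-storage-0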
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.322441 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi1a4f-account-delete-d276m"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.348973 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d46af4c-da3b-47cb-a069-cb978f0df610-kube-api-access-7h2kf" (OuterVolumeSpecName: "kube-api-access-7h2kf") pod "3d46af4c-da3b-47cb-a069-cb978f0df610" (UID: "3d46af4c-da3b-47cb-a069-cb978f0df610"). InnerVolumeSpecName "kube-api-access-7h2kf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.359577 4765 scope.go:117] "RemoveContainer" containerID="f2a1bc805d0d571e580746ca0bd410490b3bf5c4c923de3eebdaad87e5b82e4f"
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.370377 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-sxcwt"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.383102 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-4d32-account-create-update-mpglj"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.386490 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7h2kf\" (UniqueName: \"kubernetes.io/projected/3d46af4c-da3b-47cb-a069-cb978f0df610-kube-api-access-7h2kf\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.432252 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-sxcwt"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.441915 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-4d32-account-create-update-mpglj"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.473569 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.473880 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d11b24c2-0ac0-4f23-a575-d1b80db4ba11" containerName="nova-scheduler-scheduler" containerID="cri-o://48722ecb98b55b709c43a7cab76c40a65934728cb7609e6246ba369957cdbe37" gracePeriod=30
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.487390 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.520755 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9r7cb"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.536420 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9r7cb"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.553123 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.554212 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="7694f523-adf7-4964-b475-6cd94cac7d75" containerName="nova-cell1-conductor-conductor" containerID="cri-o://b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" gracePeriod=30
Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.561724 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
pod="openstack/nova-cell0-conductor-0" podUID="03838926-8208-43dc-9bfd-6af312a938a4" containerName="nova-cell0-conductor-conductor" containerID="cri-o://e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607" gracePeriod=30 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.610513 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5pfzm"] Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.631429 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.104:5671: connect: connection refused" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.655964 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d46af4c-da3b-47cb-a069-cb978f0df610" (UID: "3d46af4c-da3b-47cb-a069-cb978f0df610"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.727267 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ea4a2d0b-62e8-4527-948f-9f9c76070af1" (UID: "ea4a2d0b-62e8-4527-948f-9f9c76070af1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.739677 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="045989ab-ad14-4ec7-adda-fcb6054f8b6e" path="/var/lib/kubelet/pods/045989ab-ad14-4ec7-adda-fcb6054f8b6e/volumes" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.742059 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fbf6764-af16-4874-8b04-94c3cafebed7" path="/var/lib/kubelet/pods/2fbf6764-af16-4874-8b04-94c3cafebed7/volumes" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.743888 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bc06a2a-9f77-44e1-9b16-db768a97c7f7" path="/var/lib/kubelet/pods/3bc06a2a-9f77-44e1-9b16-db768a97c7f7/volumes" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.747693 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "3d46af4c-da3b-47cb-a069-cb978f0df610" (UID: "3d46af4c-da3b-47cb-a069-cb978f0df610"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.748702 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.751842 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4493c86d-6e67-409c-84f8-7285522e1580" path="/var/lib/kubelet/pods/4493c86d-6e67-409c-84f8-7285522e1580/volumes" Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.752297 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.762941 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.763324 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="7694f523-adf7-4964-b475-6cd94cac7d75" containerName="nova-cell1-conductor-conductor" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.765756 4765 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.765798 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.765809 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.784685 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46580548-0bb0-4026-821b-2ee72fc56f70" path="/var/lib/kubelet/pods/46580548-0bb0-4026-821b-2ee72fc56f70/volumes" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.788800 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64581618-c8c5-4c6c-8c7f-59d8dc4150ec" path="/var/lib/kubelet/pods/64581618-c8c5-4c6c-8c7f-59d8dc4150ec/volumes" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.801346 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb" path="/var/lib/kubelet/pods/c3dcaeaf-9673-49bc-97d5-ac8b3a0815bb/volumes" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.816414 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="cdf8425c-b4a5-43ee-9cce-870ae66cdb6a" path="/var/lib/kubelet/pods/cdf8425c-b4a5-43ee-9cce-870ae66cdb6a/volumes" Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.833118 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.836414 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.847447 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.847542 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="4b62b966-7b0a-4099-977c-44682f703187" containerName="ovn-northd" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.879010 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ea4a2d0b-62e8-4527-948f-9f9c76070af1" (UID: "ea4a2d0b-62e8-4527-948f-9f9c76070af1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.929582 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" containerName="rabbitmq" containerID="cri-o://f54cf327f09ae7940909c6415da47f057333ee5a45036d48b02fd4c6fd91cb2d" gracePeriod=604800 Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.929622 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "416ec9f7-82f7-4eb1-a936-51038c6da878" (UID: "416ec9f7-82f7-4eb1-a936-51038c6da878"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.929775 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694 is running failed: container process not found" containerID="eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.935238 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ea4a2d0b-62e8-4527-948f-9f9c76070af1" (UID: "ea4a2d0b-62e8-4527-948f-9f9c76070af1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.947654 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ea4a2d0b-62e8-4527-948f-9f9c76070af1" (UID: "ea4a2d0b-62e8-4527-948f-9f9c76070af1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.963962 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "3d46af4c-da3b-47cb-a069-cb978f0df610" (UID: "3d46af4c-da3b-47cb-a069-cb978f0df610"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.964352 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694 is running failed: container process not found" containerID="eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.964530 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-config" (OuterVolumeSpecName: "config") pod "ea4a2d0b-62e8-4527-948f-9f9c76070af1" (UID: "ea4a2d0b-62e8-4527-948f-9f9c76070af1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.965892 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694 is running failed: container process not found" containerID="eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Dec 10 07:12:46 crc kubenswrapper[4765]: E1210 07:12:46.965963 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-hxr5k" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.975682 4765 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.976008 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.976109 4765 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/416ec9f7-82f7-4eb1-a936-51038c6da878-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.977353 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.977445 4765 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3d46af4c-da3b-47cb-a069-cb978f0df610-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:46 crc kubenswrapper[4765]: I1210 07:12:46.977529 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ea4a2d0b-62e8-4527-948f-9f9c76070af1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.017969 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.018260 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.021205 4765 log.go:32] 
"ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.021465 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.028261 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.028560 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovs-vswitchd" Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.028958 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.029113 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.072853 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-cell1-novncproxy-0" podUID="f9af12be-6bc5-4aa8-bb84-135e3c0727cb" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.0.194:6080/vnc_lite.html\": dial tcp 10.217.0.194:6080: connect: connection refused" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.088565 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="27b317a6-1f99-4951-a064-e8ca8a38dc94" containerName="galera" containerID="cri-o://2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250" gracePeriod=30 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.094393 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod 
"fb882c24-ec9a-4e19-99ac-b6f96c420cb5" (UID: "fb882c24-ec9a-4e19-99ac-b6f96c420cb5"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: W1210 07:12:47.098051 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fec9b72_da40_4d40_9f56_5eca02badaba.slice/crio-29d018ef434d38b31d6d6d5d4f43440bc92ef5dfc063b44f41213dc8d02084d5 WatchSource:0}: Error finding container 29d018ef434d38b31d6d6d5d4f43440bc92ef5dfc063b44f41213dc8d02084d5: Status 404 returned error can't find the container with id 29d018ef434d38b31d6d6d5d4f43440bc92ef5dfc063b44f41213dc8d02084d5 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.100288 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "fb882c24-ec9a-4e19-99ac-b6f96c420cb5" (UID: "fb882c24-ec9a-4e19-99ac-b6f96c420cb5"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.180848 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.180885 4765 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb882c24-ec9a-4e19-99ac-b6f96c420cb5-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.180977 4765 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.181048 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data podName:78b416b3-3796-4fa3-8a4f-7fa6107d98a1 nodeName:}" failed. No retries permitted until 2025-12-10 07:12:51.181024289 +0000 UTC m=+1490.907689595 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data") pod "rabbitmq-server-0" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1") : configmap "rabbitmq-config-data" not found Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.291397 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5pfzm"] Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.291733 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placementbd79-account-delete-fm679"] Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.291766 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron3778-account-delete-5p76n"] Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.291778 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican43b0-account-delete-tm6mc"] Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.291789 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder1abb-account-delete-h8mp2"] Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.291800 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell092ce-account-delete-r87ss"] Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.291814 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glanceb1c8-account-delete-fghcc"] Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.301128 4765 scope.go:117] "RemoveContainer" containerID="c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.342974 4765 generic.go:334] "Generic (PLEG): container finished" podID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerID="f707edae2a556f90532dcf66301a00a51c4aade0f025e60306c8ac207c349e6b" exitCode=143 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.343128 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc","Type":"ContainerDied","Data":"f707edae2a556f90532dcf66301a00a51c4aade0f025e60306c8ac207c349e6b"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.346123 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-c976d8757-xmthg" event={"ID":"cfc6fea8-973e-42c9-9482-a4853abec6c1","Type":"ContainerDied","Data":"9535eb635c22cb790bf8517ca738bd19e588c4aeba60d667a7af393f210d9bdb"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.346150 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9535eb635c22cb790bf8517ca738bd19e588c4aeba60d667a7af393f210d9bdb" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.347934 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican43b0-account-delete-tm6mc" event={"ID":"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4","Type":"ContainerStarted","Data":"199b129a31c27fc5188f99fc68d6303ddc285b789dbe2af736f16ac465af972f"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.349490 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder1abb-account-delete-h8mp2" event={"ID":"1fec9b72-da40-4d40-9f56-5eca02badaba","Type":"ContainerStarted","Data":"29d018ef434d38b31d6d6d5d4f43440bc92ef5dfc063b44f41213dc8d02084d5"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.354100 4765 generic.go:334] "Generic (PLEG): container finished" podID="c417c995-d247-48fe-afea-472a698e27f4" 
containerID="ca5af5ea3d591a6d2f15cf44977e743548d34c8e91df4bdf5bad6f8c7094295d" exitCode=0 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.354165 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementbd79-account-delete-fm679" event={"ID":"c417c995-d247-48fe-afea-472a698e27f4","Type":"ContainerDied","Data":"ca5af5ea3d591a6d2f15cf44977e743548d34c8e91df4bdf5bad6f8c7094295d"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.358445 4765 generic.go:334] "Generic (PLEG): container finished" podID="865e0a69-0d85-4d93-9d38-f52449d09d87" containerID="2ea6901c79ede6a161d2e30da9d3b8efb9ba7a80dd2d68ffbc3c35ec54f42907" exitCode=143 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.358517 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66fcc55b75-8hcl6" event={"ID":"865e0a69-0d85-4d93-9d38-f52449d09d87","Type":"ContainerDied","Data":"2ea6901c79ede6a161d2e30da9d3b8efb9ba7a80dd2d68ffbc3c35ec54f42907"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.360972 4765 generic.go:334] "Generic (PLEG): container finished" podID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerID="2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24" exitCode=0 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.361039 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-654b8cdb7c-84l5p" event={"ID":"4e9d4a75-10e4-46dd-9180-821c917a2b5e","Type":"ContainerDied","Data":"2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.363240 4765 generic.go:334] "Generic (PLEG): container finished" podID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerID="80d3af87b8b64fb1621fe28544a27dbb97a6487a3d0bf2b741bfbc88ebf45779" exitCode=143 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.363291 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7df456d776-x4hrk" event={"ID":"a99712e9-cab7-452c-9df1-d94b5c4d96af","Type":"ContainerDied","Data":"80d3af87b8b64fb1621fe28544a27dbb97a6487a3d0bf2b741bfbc88ebf45779"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.368572 4765 generic.go:334] "Generic (PLEG): container finished" podID="202a617e-eb55-4702-8958-3502b6d8e91b" containerID="280f956e598734a0c953052f0bef830e41258f83b3ea961721502f3e42f78557" exitCode=143 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.368663 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"202a617e-eb55-4702-8958-3502b6d8e91b","Type":"ContainerDied","Data":"280f956e598734a0c953052f0bef830e41258f83b3ea961721502f3e42f78557"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.386265 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hxr5k" event={"ID":"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133","Type":"ContainerDied","Data":"685b0cffb1b0fbd1627ca40e97cecc2329b1529f247aadd35a667c92ac186781"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.386323 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="685b0cffb1b0fbd1627ca40e97cecc2329b1529f247aadd35a667c92ac186781" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.401744 4765 generic.go:334] "Generic (PLEG): container finished" podID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" containerID="ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5" exitCode=0 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.402320 4765 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4406d4e6-b2a9-4e81-9672-b54775fad3bb","Type":"ContainerDied","Data":"ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.414692 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.417198 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c0befa24-0eda-4f25-9f15-bfb0ebb74e1e/ovsdbserver-sb/0.log" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.417280 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.453674 4765 scope.go:117] "RemoveContainer" containerID="c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5" Dec 10 07:12:47 crc kubenswrapper[4765]: E1210 07:12:47.456854 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5\": container with ID starting with c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5 not found: ID does not exist" containerID="c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.456915 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5"} err="failed to get container status \"c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5\": rpc error: code = NotFound desc = could not find container \"c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5\": container with ID starting with c7191e7227a33ed65e04cf4dbddada706ffb72fecee7791bcedd21e9c75811a5 not found: ID does not exist" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.456954 4765 scope.go:117] "RemoveContainer" containerID="18dc864cc764071e1ec5b63dc28acdb5ea1bab6885de1a4d32d341104a46d4bc" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.459565 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" event={"ID":"367a49cf-488a-4852-8728-78dacbfbd500","Type":"ContainerDied","Data":"6904c7a18870c99e59c094d428f4fb9aa0a7611afe5301700842c547c5f6ff4a"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.459590 4765 generic.go:334] "Generic (PLEG): container finished" podID="367a49cf-488a-4852-8728-78dacbfbd500" containerID="6904c7a18870c99e59c094d428f4fb9aa0a7611afe5301700842c547c5f6ff4a" exitCode=143 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.461152 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell092ce-account-delete-r87ss" event={"ID":"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc","Type":"ContainerStarted","Data":"a905958259d944135a9edcf564287cdcc035dce37230c5ffca81eca103511a25"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.466449 4765 generic.go:334] "Generic (PLEG): container finished" podID="f9af12be-6bc5-4aa8-bb84-135e3c0727cb" containerID="ac0fc8ce7b9fc642344add6c6901d9f6b0f18979e1bb126414b7d8b564fa757e" exitCode=0 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.466518 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"f9af12be-6bc5-4aa8-bb84-135e3c0727cb","Type":"ContainerDied","Data":"ac0fc8ce7b9fc642344add6c6901d9f6b0f18979e1bb126414b7d8b564fa757e"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.469939 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi1a4f-account-delete-d276m"] Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.487440 4765 generic.go:334] "Generic (PLEG): container finished" podID="48199101-c7d2-4881-98bd-53d14d7308d5" containerID="b362cb9831e6868cc5bfea4106470f04254c397b3cd98c304dac462e618c6408" exitCode=143 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.487567 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"48199101-c7d2-4881-98bd-53d14d7308d5","Type":"ContainerDied","Data":"b362cb9831e6868cc5bfea4106470f04254c397b3cd98c304dac462e618c6408"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.492335 4765 generic.go:334] "Generic (PLEG): container finished" podID="b1099ee9-e4d7-496f-b35e-7617ee456898" containerID="532e6ef7ce06ebf94ad58b81fb2331379d4d7d0f9d8d702b8370e40c41c43459" exitCode=143 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.492422 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1099ee9-e4d7-496f-b35e-7617ee456898","Type":"ContainerDied","Data":"532e6ef7ce06ebf94ad58b81fb2331379d4d7d0f9d8d702b8370e40c41c43459"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.494519 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron3778-account-delete-5p76n" event={"ID":"1bbedff0-5b89-4bbb-b308-6ccb13c8216c","Type":"ContainerStarted","Data":"46f2ad887ea199ee0b460ae4c0b62ff3105c158f8f030b5f8d87d7ce3a8cf87a"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.496983 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c0befa24-0eda-4f25-9f15-bfb0ebb74e1e/ovsdbserver-sb/0.log" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.497266 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e","Type":"ContainerDied","Data":"b110cc915a19b85f1c5643901f9c9c0dc3bc02950a2a678f0ee46627af709b5b"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.497371 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.505421 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glanceb1c8-account-delete-fghcc" event={"ID":"3dbd0d4a-660a-4887-83ae-25c00f54196a","Type":"ContainerStarted","Data":"1f0fda5394f1c52fd966b0eeaa38ca5acb14870e459355a8e1f4ea2b3771da73"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.516664 4765 generic.go:334] "Generic (PLEG): container finished" podID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerID="735741e28bbecc21170b1c3726ecc32ba7fee3884af169977fd59b27fb29dbe6" exitCode=143 Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.516720 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"efa8eaec-19fb-43da-a1f3-557b0847e966","Type":"ContainerDied","Data":"735741e28bbecc21170b1c3726ecc32ba7fee3884af169977fd59b27fb29dbe6"} Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.581020 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hxr5k" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.590541 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-metrics-certs-tls-certs\") pod \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.591475 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdb-rundir\") pod \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.593213 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-config\") pod \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.593343 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-scripts\") pod \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.593402 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.593437 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdbserver-sb-tls-certs\") pod \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.593459 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrqzv\" (UniqueName: \"kubernetes.io/projected/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-kube-api-access-wrqzv\") pod \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.593129 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" (UID: "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.594332 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-scripts" (OuterVolumeSpecName: "scripts") pod "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" (UID: "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.600633 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-config" (OuterVolumeSpecName: "config") pod "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" (UID: "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.601826 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-combined-ca-bundle\") pod \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\" (UID: \"c0befa24-0eda-4f25-9f15-bfb0ebb74e1e\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.601878 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-ovn-controller-tls-certs\") pod \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.601906 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-log-ovn\") pod \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.602273 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" (UID: "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.602362 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" (UID: "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.602893 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.602913 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.602932 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.602967 4765 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.602992 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.618170 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-kube-api-access-wrqzv" (OuterVolumeSpecName: "kube-api-access-wrqzv") pod "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" (UID: "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e"). InnerVolumeSpecName "kube-api-access-wrqzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.631362 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.668392 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" (UID: "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.703884 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run-ovn\") pod \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.704039 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run\") pod \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.704073 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-combined-ca-bundle\") pod \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.704213 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-scripts\") pod \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.704277 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6tc9\" (UniqueName: \"kubernetes.io/projected/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-kube-api-access-r6tc9\") pod \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\" (UID: \"8b2c99d6-f2e1-4c1c-8825-e8c62d00d133\") " Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.704713 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.704727 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.704738 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrqzv\" (UniqueName: \"kubernetes.io/projected/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-kube-api-access-wrqzv\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.706815 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" (UID: "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.706871 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run" (OuterVolumeSpecName: "var-run") pod "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" (UID: "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.710038 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-kube-api-access-r6tc9" (OuterVolumeSpecName: "kube-api-access-r6tc9") pod "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" (UID: "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133"). InnerVolumeSpecName "kube-api-access-r6tc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.710516 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-scripts" (OuterVolumeSpecName: "scripts") pod "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" (UID: "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.741394 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" (UID: "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.757821 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" (UID: "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.763136 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" (UID: "8b2c99d6-f2e1-4c1c-8825-e8c62d00d133"). InnerVolumeSpecName "ovn-controller-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.805539 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6tc9\" (UniqueName: \"kubernetes.io/projected/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-kube-api-access-r6tc9\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.805575 4765 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.805588 4765 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-var-run\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.805602 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.805614 4765 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.805624 4765 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.805639 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.818317 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" (UID: "c0befa24-0eda-4f25-9f15-bfb0ebb74e1e"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.882273 4765 scope.go:117] "RemoveContainer" containerID="bd271ae05f43791caf944cab99a0cb4df18082c95d8817a16dccc9c6bff3afb1" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.907584 4765 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.922281 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.942189 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.958822 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.972534 4765 scope.go:117] "RemoveContainer" containerID="8faab6e5a335c25d8b666620bbbd06c7e6cd700db4ab561bb39a1a3cf463d922" Dec 10 07:12:47 crc kubenswrapper[4765]: I1210 07:12:47.984257 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 10 07:12:48 crc kubenswrapper[4765]: E1210 07:12:48.025920 4765 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 10 07:12:48 crc kubenswrapper[4765]: E1210 07:12:48.026006 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data podName:7d035962-836c-48cf-8ea4-a3e5a23f58f9 nodeName:}" failed. No retries permitted until 2025-12-10 07:12:52.025979302 +0000 UTC m=+1491.752644618 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data") pod "rabbitmq-cell1-server-0" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9") : configmap "rabbitmq-cell1-config-data" not found Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.037142 4765 scope.go:117] "RemoveContainer" containerID="4ea50907947c896cad4543aac319e9ed34423034ee1e36f452043b39462ea022" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.083627 4765 scope.go:117] "RemoveContainer" containerID="8a962de55c82b0ec8429a3fe9ee218997a5f0a7b38bc54e87077ef10ffdd1b1e" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.130023 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-config-data\") pod \"cfc6fea8-973e-42c9-9482-a4853abec6c1\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.130129 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-combined-ca-bundle\") pod \"cfc6fea8-973e-42c9-9482-a4853abec6c1\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.130192 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74qx4\" (UniqueName: \"kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-kube-api-access-74qx4\") pod \"cfc6fea8-973e-42c9-9482-a4853abec6c1\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.136345 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-public-tls-certs\") pod \"cfc6fea8-973e-42c9-9482-a4853abec6c1\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.136439 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-nova-novncproxy-tls-certs\") pod \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.136493 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-vencrypt-tls-certs\") pod \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.136571 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-internal-tls-certs\") pod \"cfc6fea8-973e-42c9-9482-a4853abec6c1\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.136895 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-run-httpd\") pod \"cfc6fea8-973e-42c9-9482-a4853abec6c1\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.136934 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-log-httpd\") pod \"cfc6fea8-973e-42c9-9482-a4853abec6c1\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.136954 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-etc-swift\") pod \"cfc6fea8-973e-42c9-9482-a4853abec6c1\" (UID: \"cfc6fea8-973e-42c9-9482-a4853abec6c1\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.136997 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-combined-ca-bundle\") pod \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.137066 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bg9m\" (UniqueName: \"kubernetes.io/projected/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-kube-api-access-4bg9m\") pod \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.137175 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-config-data\") pod \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\" (UID: \"f9af12be-6bc5-4aa8-bb84-135e3c0727cb\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.138570 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cfc6fea8-973e-42c9-9482-a4853abec6c1" (UID: "cfc6fea8-973e-42c9-9482-a4853abec6c1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.148778 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cfc6fea8-973e-42c9-9482-a4853abec6c1" (UID: "cfc6fea8-973e-42c9-9482-a4853abec6c1"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.184565 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-kube-api-access-74qx4" (OuterVolumeSpecName: "kube-api-access-74qx4") pod "cfc6fea8-973e-42c9-9482-a4853abec6c1" (UID: "cfc6fea8-973e-42c9-9482-a4853abec6c1"). InnerVolumeSpecName "kube-api-access-74qx4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.185004 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "cfc6fea8-973e-42c9-9482-a4853abec6c1" (UID: "cfc6fea8-973e-42c9-9482-a4853abec6c1"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.187726 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.197061 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.203275 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-kube-api-access-4bg9m" (OuterVolumeSpecName: "kube-api-access-4bg9m") pod "f9af12be-6bc5-4aa8-bb84-135e3c0727cb" (UID: "f9af12be-6bc5-4aa8-bb84-135e3c0727cb"). InnerVolumeSpecName "kube-api-access-4bg9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.240288 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74qx4\" (UniqueName: \"kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-kube-api-access-74qx4\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.240326 4765 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.240341 4765 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfc6fea8-973e-42c9-9482-a4853abec6c1-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.240352 4765 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cfc6fea8-973e-42c9-9482-a4853abec6c1-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.240363 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bg9m\" (UniqueName: \"kubernetes.io/projected/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-kube-api-access-4bg9m\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.284979 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/placement-75b8c6446d-lf487" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.0.153:8778/\": read tcp 10.217.0.2:54190->10.217.0.153:8778: read: connection reset by peer" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.285399 4765 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/placement-75b8c6446d-lf487" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.0.153:8778/\": read tcp 10.217.0.2:54200->10.217.0.153:8778: read: connection reset by peer" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.518671 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.529440 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9af12be-6bc5-4aa8-bb84-135e3c0727cb" (UID: "f9af12be-6bc5-4aa8-bb84-135e3c0727cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.557643 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nkww\" (UniqueName: \"kubernetes.io/projected/27b317a6-1f99-4951-a064-e8ca8a38dc94-kube-api-access-4nkww\") pod \"27b317a6-1f99-4951-a064-e8ca8a38dc94\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.557683 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-generated\") pod \"27b317a6-1f99-4951-a064-e8ca8a38dc94\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.557706 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-galera-tls-certs\") pod \"27b317a6-1f99-4951-a064-e8ca8a38dc94\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.557777 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-combined-ca-bundle\") pod \"27b317a6-1f99-4951-a064-e8ca8a38dc94\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.557829 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-default\") pod \"27b317a6-1f99-4951-a064-e8ca8a38dc94\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.557890 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"27b317a6-1f99-4951-a064-e8ca8a38dc94\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.558005 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-operator-scripts\") pod \"27b317a6-1f99-4951-a064-e8ca8a38dc94\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.558044 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" 
(UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-kolla-config\") pod \"27b317a6-1f99-4951-a064-e8ca8a38dc94\" (UID: \"27b317a6-1f99-4951-a064-e8ca8a38dc94\") " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.558281 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "f9af12be-6bc5-4aa8-bb84-135e3c0727cb" (UID: "f9af12be-6bc5-4aa8-bb84-135e3c0727cb"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.567289 4765 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.567327 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.575004 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "27b317a6-1f99-4951-a064-e8ca8a38dc94" (UID: "27b317a6-1f99-4951-a064-e8ca8a38dc94"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.575709 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "27b317a6-1f99-4951-a064-e8ca8a38dc94" (UID: "27b317a6-1f99-4951-a064-e8ca8a38dc94"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.576376 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "27b317a6-1f99-4951-a064-e8ca8a38dc94" (UID: "27b317a6-1f99-4951-a064-e8ca8a38dc94"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.579251 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "27b317a6-1f99-4951-a064-e8ca8a38dc94" (UID: "27b317a6-1f99-4951-a064-e8ca8a38dc94"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.584282 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cfc6fea8-973e-42c9-9482-a4853abec6c1" (UID: "cfc6fea8-973e-42c9-9482-a4853abec6c1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.608692 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27b317a6-1f99-4951-a064-e8ca8a38dc94-kube-api-access-4nkww" (OuterVolumeSpecName: "kube-api-access-4nkww") pod "27b317a6-1f99-4951-a064-e8ca8a38dc94" (UID: "27b317a6-1f99-4951-a064-e8ca8a38dc94"). InnerVolumeSpecName "kube-api-access-4nkww". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.609254 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "f9af12be-6bc5-4aa8-bb84-135e3c0727cb" (UID: "f9af12be-6bc5-4aa8-bb84-135e3c0727cb"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.654295 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27b317a6-1f99-4951-a064-e8ca8a38dc94" (UID: "27b317a6-1f99-4951-a064-e8ca8a38dc94"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.660918 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d46af4c-da3b-47cb-a069-cb978f0df610" path="/var/lib/kubelet/pods/3d46af4c-da3b-47cb-a069-cb978f0df610/volumes" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.661807 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" path="/var/lib/kubelet/pods/c0befa24-0eda-4f25-9f15-bfb0ebb74e1e/volumes" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.662599 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f346b982-1aa1-4398-8a1d-7171c8b9c0e4" path="/var/lib/kubelet/pods/f346b982-1aa1-4398-8a1d-7171c8b9c0e4/volumes" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.663738 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" path="/var/lib/kubelet/pods/fb882c24-ec9a-4e19-99ac-b6f96c420cb5/volumes" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.669896 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.669938 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.669952 4765 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.669967 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 
07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.669982 4765 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/27b317a6-1f99-4951-a064-e8ca8a38dc94-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.669996 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nkww\" (UniqueName: \"kubernetes.io/projected/27b317a6-1f99-4951-a064-e8ca8a38dc94-kube-api-access-4nkww\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.670007 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/27b317a6-1f99-4951-a064-e8ca8a38dc94-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.670019 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.697573 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican43b0-account-delete-tm6mc" podStartSLOduration=5.697543034 podStartE2EDuration="5.697543034s" podCreationTimestamp="2025-12-10 07:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:12:48.688352653 +0000 UTC m=+1488.415017969" watchObservedRunningTime="2025-12-10 07:12:48.697543034 +0000 UTC m=+1488.424208370" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.716703 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-config-data" (OuterVolumeSpecName: "config-data") pod "f9af12be-6bc5-4aa8-bb84-135e3c0727cb" (UID: "f9af12be-6bc5-4aa8-bb84-135e3c0727cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.739135 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder1abb-account-delete-h8mp2" podStartSLOduration=5.738994823 podStartE2EDuration="5.738994823s" podCreationTimestamp="2025-12-10 07:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:12:48.705264794 +0000 UTC m=+1488.431930120" watchObservedRunningTime="2025-12-10 07:12:48.738994823 +0000 UTC m=+1488.465660139" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.745595 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.784966 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9af12be-6bc5-4aa8-bb84-135e3c0727cb-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.791904 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell092ce-account-delete-r87ss" podStartSLOduration=4.791868547 podStartE2EDuration="4.791868547s" podCreationTimestamp="2025-12-10 07:12:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:12:48.741068242 +0000 UTC m=+1488.467733558" watchObservedRunningTime="2025-12-10 07:12:48.791868547 +0000 UTC m=+1488.518533863" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.794564 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cfc6fea8-973e-42c9-9482-a4853abec6c1" (UID: "cfc6fea8-973e-42c9-9482-a4853abec6c1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.795460 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "mysql-db") pod "27b317a6-1f99-4951-a064-e8ca8a38dc94" (UID: "27b317a6-1f99-4951-a064-e8ca8a38dc94"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.842128 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-config-data" (OuterVolumeSpecName: "config-data") pod "cfc6fea8-973e-42c9-9482-a4853abec6c1" (UID: "cfc6fea8-973e-42c9-9482-a4853abec6c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.857607 4765 generic.go:334] "Generic (PLEG): container finished" podID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerID="782ba5c041f2d0683f31e70ec7c7cd0a1637f3847ec96fe8a10620030fff50a0" exitCode=0 Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.857814 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "27b317a6-1f99-4951-a064-e8ca8a38dc94" (UID: "27b317a6-1f99-4951-a064-e8ca8a38dc94"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.861213 4765 generic.go:334] "Generic (PLEG): container finished" podID="27b317a6-1f99-4951-a064-e8ca8a38dc94" containerID="2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250" exitCode=0 Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.861431 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.862579 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hxr5k" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.864007 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-c976d8757-xmthg" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.869334 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cfc6fea8-973e-42c9-9482-a4853abec6c1" (UID: "cfc6fea8-973e-42c9-9482-a4853abec6c1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.887288 4765 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.887333 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.887344 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.887355 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfc6fea8-973e-42c9-9482-a4853abec6c1-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.887364 4765 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/27b317a6-1f99-4951-a064-e8ca8a38dc94-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.960346 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron3778-account-delete-5p76n" podStartSLOduration=5.960315938 podStartE2EDuration="5.960315938s" podCreationTimestamp="2025-12-10 07:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 07:12:48.848078646 +0000 UTC m=+1488.574743962" watchObservedRunningTime="2025-12-10 07:12:48.960315938 +0000 UTC m=+1488.686981254" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.974601 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 10 07:12:48 crc kubenswrapper[4765]: I1210 07:12:48.997961 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.087875 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="202a617e-eb55-4702-8958-3502b6d8e91b" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.164:8776/healthcheck\": read tcp 10.217.0.2:60730->10.217.0.164:8776: read: connection reset by peer" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310103 4765 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/barbican43b0-account-delete-tm6mc" event={"ID":"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4","Type":"ContainerStarted","Data":"818398cac57dcefcb64574fce10496d462c9aa4895bf95a9491256d4561e080a"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310407 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder1abb-account-delete-h8mp2" event={"ID":"1fec9b72-da40-4d40-9f56-5eca02badaba","Type":"ContainerStarted","Data":"fa9b039bec586c60153cc47099fde851491eb2e2c1af97a51854e0c46850f414"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310433 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell092ce-account-delete-r87ss" event={"ID":"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc","Type":"ContainerStarted","Data":"7f551c1327d32f1311b5f2415da3d67dc653cf1705ba6dbc430aac97617ada56"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310447 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f9af12be-6bc5-4aa8-bb84-135e3c0727cb","Type":"ContainerDied","Data":"dba326d00f38aea0d1f16f7db9e11e4e50e2d433fbe13674f301ff609209d76d"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310467 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glanceb1c8-account-delete-fghcc" event={"ID":"3dbd0d4a-660a-4887-83ae-25c00f54196a","Type":"ContainerStarted","Data":"32b6a76ccd1daa4a50f1524fe483981c2e3cd65443b53c4cd6b98ae3db49899e"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310480 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi1a4f-account-delete-d276m" event={"ID":"f429f6df-3510-4c6a-b9e8-062895218832","Type":"ContainerStarted","Data":"7a879c78d6d7c70bb65a60b88c1cb601644349aa93bf05f1fc08e3e60d091cea"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310493 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310522 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi1a4f-account-delete-d276m" event={"ID":"f429f6df-3510-4c6a-b9e8-062895218832","Type":"ContainerStarted","Data":"516aec2074d0d4765cac6d3b8311aa558452b2d93db2a6a499432d415d50e077"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310544 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310558 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron3778-account-delete-5p76n" event={"ID":"1bbedff0-5b89-4bbb-b308-6ccb13c8216c","Type":"ContainerStarted","Data":"a85ff52f8967b342b29d29d59b703382c1f6ef448d73456ffa0f05292be1900c"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310579 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310592 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75b8c6446d-lf487" event={"ID":"5cbf2f96-d196-413b-841a-9b753e6beae2","Type":"ContainerDied","Data":"782ba5c041f2d0683f31e70ec7c7cd0a1637f3847ec96fe8a10620030fff50a0"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310604 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dsf6c"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310616 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dsf6c"] Dec 10 07:12:49 crc kubenswrapper[4765]: 
I1210 07:12:49.310630 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-7wlqz"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310661 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-586f9fc866-46mgw"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310692 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310712 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"27b317a6-1f99-4951-a064-e8ca8a38dc94","Type":"ContainerDied","Data":"2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310731 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-7wlqz"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310746 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-zx6hx"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310756 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"27b317a6-1f99-4951-a064-e8ca8a38dc94","Type":"ContainerDied","Data":"5faea9cb8c82b1ef0e80b974615ade74d087297cfe0e8e6ccff9010d9f9f35ac"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310767 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-zx6hx"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310778 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-dc1e-account-create-update-tz4tl"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310789 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-dc1e-account-create-update-tz4tl"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310798 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-4bdwz"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310809 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-4bdwz"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310840 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="655c5f62-880e-4e05-9db8-da19844facdf" containerName="memcached" containerID="cri-o://b203f59bc5f85514c464b901df4f284e9f82d4b70b43ff25aa23de50dd64c527" gracePeriod=30 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.310965 4765 scope.go:117] "RemoveContainer" containerID="ac0fc8ce7b9fc642344add6c6901d9f6b0f18979e1bb126414b7d8b564fa757e" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.311197 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="ceilometer-central-agent" containerID="cri-o://38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c" gracePeriod=30 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.311313 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="e6c300c8-c9c7-40c3-8874-236b21eb4856" containerName="kube-state-metrics" containerID="cri-o://e98ba8a1e17537f2f52fedb3f0ff188945fc2a52ebce2eae3f075e33b33804bd" gracePeriod=30 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.311773 4765 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack/ceilometer-0" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="proxy-httpd" containerID="cri-o://5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d" gracePeriod=30 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.313343 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="sg-core" containerID="cri-o://eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e" gracePeriod=30 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.313383 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="ceilometer-notification-agent" containerID="cri-o://0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04" gracePeriod=30 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.313525 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-586f9fc866-46mgw" podUID="857ecb8b-72dc-40a7-a407-d85c40c40bcf" containerName="keystone-api" containerID="cri-o://97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752" gracePeriod=30 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.432263 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-b1c8-account-create-update-7h2mp"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.477486 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": read tcp 10.217.0.2:36754->10.217.0.201:8775: read: connection reset by peer" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.477557 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": read tcp 10.217.0.2:36756->10.217.0.201:8775: read: connection reset by peer" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.501508 4765 scope.go:117] "RemoveContainer" containerID="2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.521205 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-b1c8-account-create-update-7h2mp"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.536370 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glanceb1c8-account-delete-fghcc"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.536825 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placementbd79-account-delete-fm679" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.549836 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7df456d776-x4hrk" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.165:9311/healthcheck\": read tcp 10.217.0.2:53388->10.217.0.165:9311: read: connection reset by peer" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.550257 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7df456d776-x4hrk" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.165:9311/healthcheck\": read tcp 10.217.0.2:53382->10.217.0.165:9311: read: connection reset by peer" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.557169 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-2nwrc"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.592408 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-2nwrc"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.620365 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell092ce-account-delete-r87ss"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.649183 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-92ce-account-create-update-clqvf"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.657579 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-92ce-account-create-update-clqvf"] Dec 10 07:12:49 crc kubenswrapper[4765]: E1210 07:12:49.669592 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="48722ecb98b55b709c43a7cab76c40a65934728cb7609e6246ba369957cdbe37" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.671617 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 07:12:49 crc kubenswrapper[4765]: E1210 07:12:49.682109 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="48722ecb98b55b709c43a7cab76c40a65934728cb7609e6246ba369957cdbe37" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 07:12:49 crc kubenswrapper[4765]: E1210 07:12:49.684819 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="48722ecb98b55b709c43a7cab76c40a65934728cb7609e6246ba369957cdbe37" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 07:12:49 crc kubenswrapper[4765]: E1210 07:12:49.684878 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d11b24c2-0ac0-4f23-a575-d1b80db4ba11" containerName="nova-scheduler-scheduler" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.685222 4765 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.707306 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.718102 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.724155 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c417c995-d247-48fe-afea-472a698e27f4-operator-scripts\") pod \"c417c995-d247-48fe-afea-472a698e27f4\" (UID: \"c417c995-d247-48fe-afea-472a698e27f4\") " Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.724279 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mgq5\" (UniqueName: \"kubernetes.io/projected/c417c995-d247-48fe-afea-472a698e27f4-kube-api-access-7mgq5\") pod \"c417c995-d247-48fe-afea-472a698e27f4\" (UID: \"c417c995-d247-48fe-afea-472a698e27f4\") " Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.726633 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c417c995-d247-48fe-afea-472a698e27f4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c417c995-d247-48fe-afea-472a698e27f4" (UID: "c417c995-d247-48fe-afea-472a698e27f4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.735329 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c417c995-d247-48fe-afea-472a698e27f4-kube-api-access-7mgq5" (OuterVolumeSpecName: "kube-api-access-7mgq5") pod "c417c995-d247-48fe-afea-472a698e27f4" (UID: "c417c995-d247-48fe-afea-472a698e27f4"). InnerVolumeSpecName "kube-api-access-7mgq5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.743370 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-hxr5k"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.763638 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-hxr5k"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.798062 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-c976d8757-xmthg"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.818440 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-c976d8757-xmthg"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.826608 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c417c995-d247-48fe-afea-472a698e27f4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.826647 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mgq5\" (UniqueName: \"kubernetes.io/projected/c417c995-d247-48fe-afea-472a698e27f4-kube-api-access-7mgq5\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.826676 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-vclgh"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.834307 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-vclgh"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.859698 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi1a4f-account-delete-d276m"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.881987 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1a4f-account-create-update-mw9d9"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.886752 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="949ef1d3-9f74-4052-a482-9fea4e48d374" containerName="galera" containerID="cri-o://df8d58bedf5e00ee650bbe91fcb03935b51f76f914be11d02374897423dbf609" gracePeriod=30 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.894626 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-1a4f-account-create-update-mw9d9"] Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.896469 4765 generic.go:334] "Generic (PLEG): container finished" podID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerID="49d173bcd82030e3728295fd26cf9ba46fd74b265743bf325402dc0c5ce12e34" exitCode=0 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.896665 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc","Type":"ContainerDied","Data":"49d173bcd82030e3728295fd26cf9ba46fd74b265743bf325402dc0c5ce12e34"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.909330 4765 generic.go:334] "Generic (PLEG): container finished" podID="3dbd0d4a-660a-4887-83ae-25c00f54196a" containerID="32b6a76ccd1daa4a50f1524fe483981c2e3cd65443b53c4cd6b98ae3db49899e" exitCode=0 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.909482 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glanceb1c8-account-delete-fghcc" 
event={"ID":"3dbd0d4a-660a-4887-83ae-25c00f54196a","Type":"ContainerDied","Data":"32b6a76ccd1daa4a50f1524fe483981c2e3cd65443b53c4cd6b98ae3db49899e"} Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.939758 4765 generic.go:334] "Generic (PLEG): container finished" podID="b1099ee9-e4d7-496f-b35e-7617ee456898" containerID="90ca341c2978c3ac47bd6f8955762450564583c1ccb4813fdb2ffc303391ba52" exitCode=0 Dec 10 07:12:49 crc kubenswrapper[4765]: I1210 07:12:49.939878 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1099ee9-e4d7-496f-b35e-7617ee456898","Type":"ContainerDied","Data":"90ca341c2978c3ac47bd6f8955762450564583c1ccb4813fdb2ffc303391ba52"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.001589 4765 generic.go:334] "Generic (PLEG): container finished" podID="1fec9b72-da40-4d40-9f56-5eca02badaba" containerID="fa9b039bec586c60153cc47099fde851491eb2e2c1af97a51854e0c46850f414" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.001716 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder1abb-account-delete-h8mp2" event={"ID":"1fec9b72-da40-4d40-9f56-5eca02badaba","Type":"ContainerDied","Data":"fa9b039bec586c60153cc47099fde851491eb2e2c1af97a51854e0c46850f414"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.062716 4765 generic.go:334] "Generic (PLEG): container finished" podID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerID="5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.062755 4765 generic.go:334] "Generic (PLEG): container finished" podID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerID="eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e" exitCode=2 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.062849 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8847d4d-d633-4ca3-90fa-3384e525864d","Type":"ContainerDied","Data":"5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.062888 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8847d4d-d633-4ca3-90fa-3384e525864d","Type":"ContainerDied","Data":"eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.085410 4765 generic.go:334] "Generic (PLEG): container finished" podID="b5b765ff-9a5a-48b8-80c7-e8bd56613fcc" containerID="7f551c1327d32f1311b5f2415da3d67dc653cf1705ba6dbc430aac97617ada56" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.085987 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell092ce-account-delete-r87ss" event={"ID":"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc","Type":"ContainerDied","Data":"7f551c1327d32f1311b5f2415da3d67dc653cf1705ba6dbc430aac97617ada56"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.093810 4765 generic.go:334] "Generic (PLEG): container finished" podID="1bbedff0-5b89-4bbb-b308-6ccb13c8216c" containerID="a85ff52f8967b342b29d29d59b703382c1f6ef448d73456ffa0f05292be1900c" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.093881 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron3778-account-delete-5p76n" event={"ID":"1bbedff0-5b89-4bbb-b308-6ccb13c8216c","Type":"ContainerDied","Data":"a85ff52f8967b342b29d29d59b703382c1f6ef448d73456ffa0f05292be1900c"} Dec 10 07:12:50 
crc kubenswrapper[4765]: I1210 07:12:50.098020 4765 generic.go:334] "Generic (PLEG): container finished" podID="e6c300c8-c9c7-40c3-8874-236b21eb4856" containerID="e98ba8a1e17537f2f52fedb3f0ff188945fc2a52ebce2eae3f075e33b33804bd" exitCode=2 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.098118 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e6c300c8-c9c7-40c3-8874-236b21eb4856","Type":"ContainerDied","Data":"e98ba8a1e17537f2f52fedb3f0ff188945fc2a52ebce2eae3f075e33b33804bd"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.132788 4765 generic.go:334] "Generic (PLEG): container finished" podID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerID="73d0e7ff7a431b33ef59f1209e48d0aa7cb254124be09d9a85a9829d0d6c40a9" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.132913 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"efa8eaec-19fb-43da-a1f3-557b0847e966","Type":"ContainerDied","Data":"73d0e7ff7a431b33ef59f1209e48d0aa7cb254124be09d9a85a9829d0d6c40a9"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.142148 4765 generic.go:334] "Generic (PLEG): container finished" podID="f429f6df-3510-4c6a-b9e8-062895218832" containerID="7a879c78d6d7c70bb65a60b88c1cb601644349aa93bf05f1fc08e3e60d091cea" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.142265 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi1a4f-account-delete-d276m" event={"ID":"f429f6df-3510-4c6a-b9e8-062895218832","Type":"ContainerDied","Data":"7a879c78d6d7c70bb65a60b88c1cb601644349aa93bf05f1fc08e3e60d091cea"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.147756 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementbd79-account-delete-fm679" event={"ID":"c417c995-d247-48fe-afea-472a698e27f4","Type":"ContainerDied","Data":"e6e1feb07b2ab1c0af54febb53cf41f2d76a8bc8d094a903a2bc867836f98134"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.147796 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6e1feb07b2ab1c0af54febb53cf41f2d76a8bc8d094a903a2bc867836f98134" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.147877 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placementbd79-account-delete-fm679" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.164707 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.181337 4765 generic.go:334] "Generic (PLEG): container finished" podID="ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4" containerID="818398cac57dcefcb64574fce10496d462c9aa4895bf95a9491256d4561e080a" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.181430 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican43b0-account-delete-tm6mc" event={"ID":"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4","Type":"ContainerDied","Data":"818398cac57dcefcb64574fce10496d462c9aa4895bf95a9491256d4561e080a"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.186455 4765 generic.go:334] "Generic (PLEG): container finished" podID="367a49cf-488a-4852-8728-78dacbfbd500" containerID="db7c3938391cd43e268448d1b8ffd385e79c3741b66633044668f1d83c8facc4" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.186521 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" event={"ID":"367a49cf-488a-4852-8728-78dacbfbd500","Type":"ContainerDied","Data":"db7c3938391cd43e268448d1b8ffd385e79c3741b66633044668f1d83c8facc4"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.192390 4765 generic.go:334] "Generic (PLEG): container finished" podID="48199101-c7d2-4881-98bd-53d14d7308d5" containerID="e51ededa81fc983915ee20952321442588bb91e3bed29e48234b289ecdd3cfdc" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.192467 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"48199101-c7d2-4881-98bd-53d14d7308d5","Type":"ContainerDied","Data":"e51ededa81fc983915ee20952321442588bb91e3bed29e48234b289ecdd3cfdc"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.215709 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7df456d776-x4hrk" event={"ID":"a99712e9-cab7-452c-9df1-d94b5c4d96af","Type":"ContainerDied","Data":"2caeb9530028c3e6065c938e2b0efd49e4234ecf49bcff419b6a01776b982ffe"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.216795 4765 generic.go:334] "Generic (PLEG): container finished" podID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerID="2caeb9530028c3e6065c938e2b0efd49e4234ecf49bcff419b6a01776b982ffe" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.251891 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.256992 4765 generic.go:334] "Generic (PLEG): container finished" podID="202a617e-eb55-4702-8958-3502b6d8e91b" containerID="975b9afa0ea86dc2438e36b28f792b56c554b75ab840658b6921015304cc0b22" exitCode=0 Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.258234 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"202a617e-eb55-4702-8958-3502b6d8e91b","Type":"ContainerDied","Data":"975b9afa0ea86dc2438e36b28f792b56c554b75ab840658b6921015304cc0b22"} Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.267794 4765 scope.go:117] "RemoveContainer" containerID="121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.270326 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.336852 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7whh5\" (UniqueName: \"kubernetes.io/projected/5cbf2f96-d196-413b-841a-9b753e6beae2-kube-api-access-7whh5\") pod \"5cbf2f96-d196-413b-841a-9b753e6beae2\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.336955 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-config-data\") pod \"5cbf2f96-d196-413b-841a-9b753e6beae2\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.336993 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-scripts\") pod \"5cbf2f96-d196-413b-841a-9b753e6beae2\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.337017 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-public-tls-certs\") pod \"5cbf2f96-d196-413b-841a-9b753e6beae2\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.337049 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-combined-ca-bundle\") pod \"5cbf2f96-d196-413b-841a-9b753e6beae2\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.339245 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cbf2f96-d196-413b-841a-9b753e6beae2-logs\") pod \"5cbf2f96-d196-413b-841a-9b753e6beae2\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.339518 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-internal-tls-certs\") pod \"5cbf2f96-d196-413b-841a-9b753e6beae2\" (UID: \"5cbf2f96-d196-413b-841a-9b753e6beae2\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.340817 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cbf2f96-d196-413b-841a-9b753e6beae2-logs" (OuterVolumeSpecName: "logs") pod "5cbf2f96-d196-413b-841a-9b753e6beae2" (UID: "5cbf2f96-d196-413b-841a-9b753e6beae2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.342184 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cbf2f96-d196-413b-841a-9b753e6beae2-kube-api-access-7whh5" (OuterVolumeSpecName: "kube-api-access-7whh5") pod "5cbf2f96-d196-413b-841a-9b753e6beae2" (UID: "5cbf2f96-d196-413b-841a-9b753e6beae2"). InnerVolumeSpecName "kube-api-access-7whh5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.343221 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-scripts" (OuterVolumeSpecName: "scripts") pod "5cbf2f96-d196-413b-841a-9b753e6beae2" (UID: "5cbf2f96-d196-413b-841a-9b753e6beae2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.371196 4765 scope.go:117] "RemoveContainer" containerID="2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250" Dec 10 07:12:50 crc kubenswrapper[4765]: E1210 07:12:50.371802 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250\": container with ID starting with 2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250 not found: ID does not exist" containerID="2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.371857 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250"} err="failed to get container status \"2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250\": rpc error: code = NotFound desc = could not find container \"2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250\": container with ID starting with 2ac6529be604d7f51ae87512f07a4957271ca6d55e16b66d66b5f6cd885d1250 not found: ID does not exist" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.371896 4765 scope.go:117] "RemoveContainer" containerID="121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0" Dec 10 07:12:50 crc kubenswrapper[4765]: E1210 07:12:50.372363 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0\": container with ID starting with 121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0 not found: ID does not exist" containerID="121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.372401 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0"} err="failed to get container status \"121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0\": rpc error: code = NotFound desc = could not find container \"121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0\": container with ID starting with 121f574801a39c842acc28bbf6ac95cb142b8de010246a252860a5bfffa998f0 not found: ID does not exist" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.372431 4765 scope.go:117] "RemoveContainer" containerID="975b9afa0ea86dc2438e36b28f792b56c554b75ab840658b6921015304cc0b22" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.404315 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.425335 4765 scope.go:117] "RemoveContainer" containerID="280f956e598734a0c953052f0bef830e41258f83b3ea961721502f3e42f78557" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.441050 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-internal-tls-certs\") pod \"202a617e-eb55-4702-8958-3502b6d8e91b\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.441339 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-config\") pod \"e6c300c8-c9c7-40c3-8874-236b21eb4856\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.441429 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data\") pod \"202a617e-eb55-4702-8958-3502b6d8e91b\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.441466 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-combined-ca-bundle\") pod \"202a617e-eb55-4702-8958-3502b6d8e91b\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.441501 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/202a617e-eb55-4702-8958-3502b6d8e91b-logs\") pod \"202a617e-eb55-4702-8958-3502b6d8e91b\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.441560 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/202a617e-eb55-4702-8958-3502b6d8e91b-etc-machine-id\") pod \"202a617e-eb55-4702-8958-3502b6d8e91b\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.441635 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-certs\") pod \"e6c300c8-c9c7-40c3-8874-236b21eb4856\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.441708 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-public-tls-certs\") pod \"202a617e-eb55-4702-8958-3502b6d8e91b\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.441818 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-combined-ca-bundle\") pod \"e6c300c8-c9c7-40c3-8874-236b21eb4856\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.441859 4765 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnjnf\" (UniqueName: \"kubernetes.io/projected/202a617e-eb55-4702-8958-3502b6d8e91b-kube-api-access-hnjnf\") pod \"202a617e-eb55-4702-8958-3502b6d8e91b\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.442148 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data-custom\") pod \"202a617e-eb55-4702-8958-3502b6d8e91b\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.442181 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k569q\" (UniqueName: \"kubernetes.io/projected/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-api-access-k569q\") pod \"e6c300c8-c9c7-40c3-8874-236b21eb4856\" (UID: \"e6c300c8-c9c7-40c3-8874-236b21eb4856\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.442707 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-config-data" (OuterVolumeSpecName: "config-data") pod "5cbf2f96-d196-413b-841a-9b753e6beae2" (UID: "5cbf2f96-d196-413b-841a-9b753e6beae2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.442812 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/202a617e-eb55-4702-8958-3502b6d8e91b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "202a617e-eb55-4702-8958-3502b6d8e91b" (UID: "202a617e-eb55-4702-8958-3502b6d8e91b"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.443136 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-scripts\") pod \"202a617e-eb55-4702-8958-3502b6d8e91b\" (UID: \"202a617e-eb55-4702-8958-3502b6d8e91b\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.443910 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.443928 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.443941 4765 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/202a617e-eb55-4702-8958-3502b6d8e91b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.445931 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cbf2f96-d196-413b-841a-9b753e6beae2-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.446059 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7whh5\" (UniqueName: \"kubernetes.io/projected/5cbf2f96-d196-413b-841a-9b753e6beae2-kube-api-access-7whh5\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.448342 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/202a617e-eb55-4702-8958-3502b6d8e91b-logs" (OuterVolumeSpecName: "logs") pod "202a617e-eb55-4702-8958-3502b6d8e91b" (UID: "202a617e-eb55-4702-8958-3502b6d8e91b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.461099 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "202a617e-eb55-4702-8958-3502b6d8e91b" (UID: "202a617e-eb55-4702-8958-3502b6d8e91b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.463433 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-api-access-k569q" (OuterVolumeSpecName: "kube-api-access-k569q") pod "e6c300c8-c9c7-40c3-8874-236b21eb4856" (UID: "e6c300c8-c9c7-40c3-8874-236b21eb4856"). InnerVolumeSpecName "kube-api-access-k569q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.463792 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5cbf2f96-d196-413b-841a-9b753e6beae2" (UID: "5cbf2f96-d196-413b-841a-9b753e6beae2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.463830 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-scripts" (OuterVolumeSpecName: "scripts") pod "202a617e-eb55-4702-8958-3502b6d8e91b" (UID: "202a617e-eb55-4702-8958-3502b6d8e91b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.484413 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "e6c300c8-c9c7-40c3-8874-236b21eb4856" (UID: "e6c300c8-c9c7-40c3-8874-236b21eb4856"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.484613 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/202a617e-eb55-4702-8958-3502b6d8e91b-kube-api-access-hnjnf" (OuterVolumeSpecName: "kube-api-access-hnjnf") pod "202a617e-eb55-4702-8958-3502b6d8e91b" (UID: "202a617e-eb55-4702-8958-3502b6d8e91b"). InnerVolumeSpecName "kube-api-access-hnjnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.495326 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "202a617e-eb55-4702-8958-3502b6d8e91b" (UID: "202a617e-eb55-4702-8958-3502b6d8e91b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.499228 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6c300c8-c9c7-40c3-8874-236b21eb4856" (UID: "e6c300c8-c9c7-40c3-8874-236b21eb4856"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.516280 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5cbf2f96-d196-413b-841a-9b753e6beae2" (UID: "5cbf2f96-d196-413b-841a-9b753e6beae2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.522936 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "e6c300c8-c9c7-40c3-8874-236b21eb4856" (UID: "e6c300c8-c9c7-40c3-8874-236b21eb4856"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.522970 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "202a617e-eb55-4702-8958-3502b6d8e91b" (UID: "202a617e-eb55-4702-8958-3502b6d8e91b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.523446 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "202a617e-eb55-4702-8958-3502b6d8e91b" (UID: "202a617e-eb55-4702-8958-3502b6d8e91b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.525267 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data" (OuterVolumeSpecName: "config-data") pod "202a617e-eb55-4702-8958-3502b6d8e91b" (UID: "202a617e-eb55-4702-8958-3502b6d8e91b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.527637 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5cbf2f96-d196-413b-841a-9b753e6beae2" (UID: "5cbf2f96-d196-413b-841a-9b753e6beae2"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.547556 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data\") pod \"a99712e9-cab7-452c-9df1-d94b5c4d96af\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.547645 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a99712e9-cab7-452c-9df1-d94b5c4d96af-logs\") pod \"a99712e9-cab7-452c-9df1-d94b5c4d96af\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.547706 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-internal-tls-certs\") pod \"a99712e9-cab7-452c-9df1-d94b5c4d96af\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.547779 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-combined-ca-bundle\") pod \"a99712e9-cab7-452c-9df1-d94b5c4d96af\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.547849 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-public-tls-certs\") pod \"a99712e9-cab7-452c-9df1-d94b5c4d96af\" (UID: 
\"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.547887 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data-custom\") pod \"a99712e9-cab7-452c-9df1-d94b5c4d96af\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.547916 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw8h5\" (UniqueName: \"kubernetes.io/projected/a99712e9-cab7-452c-9df1-d94b5c4d96af-kube-api-access-sw8h5\") pod \"a99712e9-cab7-452c-9df1-d94b5c4d96af\" (UID: \"a99712e9-cab7-452c-9df1-d94b5c4d96af\") " Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548395 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548415 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548429 4765 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548447 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548459 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548468 4765 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548476 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/202a617e-eb55-4702-8958-3502b6d8e91b-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548485 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548494 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbf2f96-d196-413b-841a-9b753e6beae2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548504 4765 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548513 4765 
reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548521 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6c300c8-c9c7-40c3-8874-236b21eb4856-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548532 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnjnf\" (UniqueName: \"kubernetes.io/projected/202a617e-eb55-4702-8958-3502b6d8e91b-kube-api-access-hnjnf\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548541 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/202a617e-eb55-4702-8958-3502b6d8e91b-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.548550 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k569q\" (UniqueName: \"kubernetes.io/projected/e6c300c8-c9c7-40c3-8874-236b21eb4856-kube-api-access-k569q\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.551885 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a99712e9-cab7-452c-9df1-d94b5c4d96af-logs" (OuterVolumeSpecName: "logs") pod "a99712e9-cab7-452c-9df1-d94b5c4d96af" (UID: "a99712e9-cab7-452c-9df1-d94b5c4d96af"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.552106 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a99712e9-cab7-452c-9df1-d94b5c4d96af-kube-api-access-sw8h5" (OuterVolumeSpecName: "kube-api-access-sw8h5") pod "a99712e9-cab7-452c-9df1-d94b5c4d96af" (UID: "a99712e9-cab7-452c-9df1-d94b5c4d96af"). InnerVolumeSpecName "kube-api-access-sw8h5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.556213 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a99712e9-cab7-452c-9df1-d94b5c4d96af" (UID: "a99712e9-cab7-452c-9df1-d94b5c4d96af"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.600009 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a99712e9-cab7-452c-9df1-d94b5c4d96af" (UID: "a99712e9-cab7-452c-9df1-d94b5c4d96af"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.610763 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02844920-dce5-4755-a9f7-794a1a95ed34" path="/var/lib/kubelet/pods/02844920-dce5-4755-a9f7-794a1a95ed34/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.611488 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="054ef117-bca3-4fe9-aeb9-caf1deba8d5e" path="/var/lib/kubelet/pods/054ef117-bca3-4fe9-aeb9-caf1deba8d5e/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.612949 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27b317a6-1f99-4951-a064-e8ca8a38dc94" path="/var/lib/kubelet/pods/27b317a6-1f99-4951-a064-e8ca8a38dc94/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.614565 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27" path="/var/lib/kubelet/pods/53b9dbc8-ad83-4e6c-aa18-d0e5ebb6fe27/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.615309 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2" path="/var/lib/kubelet/pods/6f55e0fb-8faf-47f2-bbb2-7ac4e9a32ff2/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.615987 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fea25c4-5340-4fab-87cd-22fa2bea0028" path="/var/lib/kubelet/pods/7fea25c4-5340-4fab-87cd-22fa2bea0028/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.616685 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" path="/var/lib/kubelet/pods/8b2c99d6-f2e1-4c1c-8825-e8c62d00d133/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.619602 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d180e18-f719-4221-bdaf-a07eb74f2b6f" path="/var/lib/kubelet/pods/9d180e18-f719-4221-bdaf-a07eb74f2b6f/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.620445 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a76bfc27-647e-4e27-88d4-65c8fc8e1815" path="/var/lib/kubelet/pods/a76bfc27-647e-4e27-88d4-65c8fc8e1815/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.621003 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" path="/var/lib/kubelet/pods/cfc6fea8-973e-42c9-9482-a4853abec6c1/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.621037 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a99712e9-cab7-452c-9df1-d94b5c4d96af" (UID: "a99712e9-cab7-452c-9df1-d94b5c4d96af"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.622249 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4fb93a5-4fe1-41ba-aed0-3f29d881fce5" path="/var/lib/kubelet/pods/d4fb93a5-4fe1-41ba-aed0-3f29d881fce5/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.622793 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df56d76b-89b4-47f6-a7e6-386d4d49ff43" path="/var/lib/kubelet/pods/df56d76b-89b4-47f6-a7e6-386d4d49ff43/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.624204 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9af12be-6bc5-4aa8-bb84-135e3c0727cb" path="/var/lib/kubelet/pods/f9af12be-6bc5-4aa8-bb84-135e3c0727cb/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.626408 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="facc4775-9655-433e-90d8-9dbea8e3ac63" path="/var/lib/kubelet/pods/facc4775-9655-433e-90d8-9dbea8e3ac63/volumes" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.627379 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a99712e9-cab7-452c-9df1-d94b5c4d96af" (UID: "a99712e9-cab7-452c-9df1-d94b5c4d96af"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.630542 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data" (OuterVolumeSpecName: "config-data") pod "a99712e9-cab7-452c-9df1-d94b5c4d96af" (UID: "a99712e9-cab7-452c-9df1-d94b5c4d96af"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.650248 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a99712e9-cab7-452c-9df1-d94b5c4d96af-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.650280 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.650293 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.650301 4765 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.650309 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.650318 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw8h5\" (UniqueName: \"kubernetes.io/projected/a99712e9-cab7-452c-9df1-d94b5c4d96af-kube-api-access-sw8h5\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:50 crc kubenswrapper[4765]: I1210 07:12:50.650327 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a99712e9-cab7-452c-9df1-d94b5c4d96af-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.267681 4765 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.268060 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data podName:78b416b3-3796-4fa3-8a4f-7fa6107d98a1 nodeName:}" failed. No retries permitted until 2025-12-10 07:12:59.268037298 +0000 UTC m=+1498.994702614 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data") pod "rabbitmq-server-0" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1") : configmap "rabbitmq-config-data" not found Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.277861 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e6c300c8-c9c7-40c3-8874-236b21eb4856","Type":"ContainerDied","Data":"80c83747377768d2308b28c66acb2bab655ca2d1cd5cfc9a9be2b5c91e048a67"} Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.278152 4765 scope.go:117] "RemoveContainer" containerID="e98ba8a1e17537f2f52fedb3f0ff188945fc2a52ebce2eae3f075e33b33804bd" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.278184 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.287216 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75b8c6446d-lf487" event={"ID":"5cbf2f96-d196-413b-841a-9b753e6beae2","Type":"ContainerDied","Data":"ce9ab5a6f376ea6cb9dea6d80fddc10af613352a67ef7f387fe0f4517b12d4bd"} Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.287230 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-75b8c6446d-lf487" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.291489 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7df456d776-x4hrk" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.291645 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7df456d776-x4hrk" event={"ID":"a99712e9-cab7-452c-9df1-d94b5c4d96af","Type":"ContainerDied","Data":"1a544e35f2564f5d8ea228e06077634a6913a537f1e9cb12b09d75931027d228"} Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.294663 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"202a617e-eb55-4702-8958-3502b6d8e91b","Type":"ContainerDied","Data":"d4429a815a02743ae623f3f63aad725c357eaadffc8a512562311b004ef73799"} Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.294835 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.304125 4765 generic.go:334] "Generic (PLEG): container finished" podID="d11b24c2-0ac0-4f23-a575-d1b80db4ba11" containerID="48722ecb98b55b709c43a7cab76c40a65934728cb7609e6246ba369957cdbe37" exitCode=0 Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.304194 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d11b24c2-0ac0-4f23-a575-d1b80db4ba11","Type":"ContainerDied","Data":"48722ecb98b55b709c43a7cab76c40a65934728cb7609e6246ba369957cdbe37"} Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.310326 4765 generic.go:334] "Generic (PLEG): container finished" podID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerID="38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c" exitCode=0 Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.310394 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8847d4d-d633-4ca3-90fa-3384e525864d","Type":"ContainerDied","Data":"38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c"} Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.314188 4765 generic.go:334] "Generic (PLEG): container finished" podID="655c5f62-880e-4e05-9db8-da19844facdf" containerID="b203f59bc5f85514c464b901df4f284e9f82d4b70b43ff25aa23de50dd64c527" exitCode=0 Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.314362 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"655c5f62-880e-4e05-9db8-da19844facdf","Type":"ContainerDied","Data":"b203f59bc5f85514c464b901df4f284e9f82d4b70b43ff25aa23de50dd64c527"} Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.360305 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607" 
cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.367761 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.384971 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.385107 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="03838926-8208-43dc-9bfd-6af312a938a4" containerName="nova-cell0-conductor-conductor" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.467975 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.475369 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.493300 4765 scope.go:117] "RemoveContainer" containerID="782ba5c041f2d0683f31e70ec7c7cd0a1637f3847ec96fe8a10620030fff50a0" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.495347 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7df456d776-x4hrk"] Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.497202 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glanceb1c8-account-delete-fghcc" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.503134 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7df456d776-x4hrk"] Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.506325 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.512485 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.520136 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.521600 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.538532 4765 scope.go:117] "RemoveContainer" containerID="0ec9efaecee0820558e3dfac5cce87e629fece52ce9474ad4a6e8484c9d7a6fb" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.542263 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.548964 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.579759 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-75b8c6446d-lf487"] Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.580948 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt8jg\" (UniqueName: \"kubernetes.io/projected/367a49cf-488a-4852-8728-78dacbfbd500-kube-api-access-nt8jg\") pod \"367a49cf-488a-4852-8728-78dacbfbd500\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.580996 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-config-data\") pod \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.581033 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9cwl\" (UniqueName: \"kubernetes.io/projected/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-kube-api-access-g9cwl\") pod \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.581060 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-combined-ca-bundle\") pod \"367a49cf-488a-4852-8728-78dacbfbd500\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.581737 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-logs\") pod \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.581778 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data-custom\") pod \"367a49cf-488a-4852-8728-78dacbfbd500\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.581846 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-nova-metadata-tls-certs\") pod \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.581907 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/367a49cf-488a-4852-8728-78dacbfbd500-logs\") pod \"367a49cf-488a-4852-8728-78dacbfbd500\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.581937 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data\") pod \"367a49cf-488a-4852-8728-78dacbfbd500\" (UID: \"367a49cf-488a-4852-8728-78dacbfbd500\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.581974 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-combined-ca-bundle\") pod \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\" (UID: \"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.582243 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-logs" (OuterVolumeSpecName: "logs") pod "6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" (UID: "6a5f12df-98c4-4ab8-af81-e1b8f2067fcc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.582557 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.582541 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/367a49cf-488a-4852-8728-78dacbfbd500-logs" (OuterVolumeSpecName: "logs") pod "367a49cf-488a-4852-8728-78dacbfbd500" (UID: "367a49cf-488a-4852-8728-78dacbfbd500"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.590538 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-75b8c6446d-lf487"] Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.594410 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/367a49cf-488a-4852-8728-78dacbfbd500-kube-api-access-nt8jg" (OuterVolumeSpecName: "kube-api-access-nt8jg") pod "367a49cf-488a-4852-8728-78dacbfbd500" (UID: "367a49cf-488a-4852-8728-78dacbfbd500"). InnerVolumeSpecName "kube-api-access-nt8jg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.598744 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "367a49cf-488a-4852-8728-78dacbfbd500" (UID: "367a49cf-488a-4852-8728-78dacbfbd500"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.607290 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-kube-api-access-g9cwl" (OuterVolumeSpecName: "kube-api-access-g9cwl") pod "6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" (UID: "6a5f12df-98c4-4ab8-af81-e1b8f2067fcc"). InnerVolumeSpecName "kube-api-access-g9cwl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.636049 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" (UID: "6a5f12df-98c4-4ab8-af81-e1b8f2067fcc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.638191 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "367a49cf-488a-4852-8728-78dacbfbd500" (UID: "367a49cf-488a-4852-8728-78dacbfbd500"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.678971 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-config-data" (OuterVolumeSpecName: "config-data") pod "6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" (UID: "6a5f12df-98c4-4ab8-af81-e1b8f2067fcc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.684269 4765 scope.go:117] "RemoveContainer" containerID="2caeb9530028c3e6065c938e2b0efd49e4234ecf49bcff419b6a01776b982ffe" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685218 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-scripts\") pod \"b1099ee9-e4d7-496f-b35e-7617ee456898\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685265 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-logs\") pod \"b1099ee9-e4d7-496f-b35e-7617ee456898\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685356 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-config-data\") pod \"efa8eaec-19fb-43da-a1f3-557b0847e966\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685434 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dbd0d4a-660a-4887-83ae-25c00f54196a-operator-scripts\") pod \"3dbd0d4a-660a-4887-83ae-25c00f54196a\" (UID: \"3dbd0d4a-660a-4887-83ae-25c00f54196a\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685556 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-combined-ca-bundle\") pod \"b1099ee9-e4d7-496f-b35e-7617ee456898\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685606 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-public-tls-certs\") pod 
\"efa8eaec-19fb-43da-a1f3-557b0847e966\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685633 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-config-data\") pod \"b1099ee9-e4d7-496f-b35e-7617ee456898\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685650 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-internal-tls-certs\") pod \"efa8eaec-19fb-43da-a1f3-557b0847e966\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685677 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-httpd-run\") pod \"b1099ee9-e4d7-496f-b35e-7617ee456898\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685752 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"b1099ee9-e4d7-496f-b35e-7617ee456898\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685836 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/efa8eaec-19fb-43da-a1f3-557b0847e966-logs\") pod \"efa8eaec-19fb-43da-a1f3-557b0847e966\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.685887 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-public-tls-certs\") pod \"b1099ee9-e4d7-496f-b35e-7617ee456898\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.686704 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcrqn\" (UniqueName: \"kubernetes.io/projected/3dbd0d4a-660a-4887-83ae-25c00f54196a-kube-api-access-lcrqn\") pod \"3dbd0d4a-660a-4887-83ae-25c00f54196a\" (UID: \"3dbd0d4a-660a-4887-83ae-25c00f54196a\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.686759 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7jlp\" (UniqueName: \"kubernetes.io/projected/b1099ee9-e4d7-496f-b35e-7617ee456898-kube-api-access-j7jlp\") pod \"b1099ee9-e4d7-496f-b35e-7617ee456898\" (UID: \"b1099ee9-e4d7-496f-b35e-7617ee456898\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.686866 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-combined-ca-bundle\") pod \"efa8eaec-19fb-43da-a1f3-557b0847e966\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.686910 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fncrb\" (UniqueName: \"kubernetes.io/projected/efa8eaec-19fb-43da-a1f3-557b0847e966-kube-api-access-fncrb\") pod 
\"efa8eaec-19fb-43da-a1f3-557b0847e966\" (UID: \"efa8eaec-19fb-43da-a1f3-557b0847e966\") " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.687568 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt8jg\" (UniqueName: \"kubernetes.io/projected/367a49cf-488a-4852-8728-78dacbfbd500-kube-api-access-nt8jg\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.687594 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.687606 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9cwl\" (UniqueName: \"kubernetes.io/projected/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-kube-api-access-g9cwl\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.687620 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.687632 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.687646 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/367a49cf-488a-4852-8728-78dacbfbd500-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.687658 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.688784 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efa8eaec-19fb-43da-a1f3-557b0847e966-logs" (OuterVolumeSpecName: "logs") pod "efa8eaec-19fb-43da-a1f3-557b0847e966" (UID: "efa8eaec-19fb-43da-a1f3-557b0847e966"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.689580 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b1099ee9-e4d7-496f-b35e-7617ee456898" (UID: "b1099ee9-e4d7-496f-b35e-7617ee456898"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.690393 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-logs" (OuterVolumeSpecName: "logs") pod "b1099ee9-e4d7-496f-b35e-7617ee456898" (UID: "b1099ee9-e4d7-496f-b35e-7617ee456898"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.691742 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dbd0d4a-660a-4887-83ae-25c00f54196a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3dbd0d4a-660a-4887-83ae-25c00f54196a" (UID: "3dbd0d4a-660a-4887-83ae-25c00f54196a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.705269 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "b1099ee9-e4d7-496f-b35e-7617ee456898" (UID: "b1099ee9-e4d7-496f-b35e-7617ee456898"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.709780 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-scripts" (OuterVolumeSpecName: "scripts") pod "b1099ee9-e4d7-496f-b35e-7617ee456898" (UID: "b1099ee9-e4d7-496f-b35e-7617ee456898"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.714546 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efa8eaec-19fb-43da-a1f3-557b0847e966-kube-api-access-fncrb" (OuterVolumeSpecName: "kube-api-access-fncrb") pod "efa8eaec-19fb-43da-a1f3-557b0847e966" (UID: "efa8eaec-19fb-43da-a1f3-557b0847e966"). InnerVolumeSpecName "kube-api-access-fncrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.715158 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1099ee9-e4d7-496f-b35e-7617ee456898-kube-api-access-j7jlp" (OuterVolumeSpecName: "kube-api-access-j7jlp") pod "b1099ee9-e4d7-496f-b35e-7617ee456898" (UID: "b1099ee9-e4d7-496f-b35e-7617ee456898"). InnerVolumeSpecName "kube-api-access-j7jlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.715821 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" (UID: "6a5f12df-98c4-4ab8-af81-e1b8f2067fcc"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.737318 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dbd0d4a-660a-4887-83ae-25c00f54196a-kube-api-access-lcrqn" (OuterVolumeSpecName: "kube-api-access-lcrqn") pod "3dbd0d4a-660a-4887-83ae-25c00f54196a" (UID: "3dbd0d4a-660a-4887-83ae-25c00f54196a"). InnerVolumeSpecName "kube-api-access-lcrqn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.757163 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.763790 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.765108 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.765155 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="7694f523-adf7-4964-b475-6cd94cac7d75" containerName="nova-cell1-conductor-conductor" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.765930 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "efa8eaec-19fb-43da-a1f3-557b0847e966" (UID: "efa8eaec-19fb-43da-a1f3-557b0847e966"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.766050 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1099ee9-e4d7-496f-b35e-7617ee456898" (UID: "b1099ee9-e4d7-496f-b35e-7617ee456898"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.773241 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data" (OuterVolumeSpecName: "config-data") pod "367a49cf-488a-4852-8728-78dacbfbd500" (UID: "367a49cf-488a-4852-8728-78dacbfbd500"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.778753 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-config-data" (OuterVolumeSpecName: "config-data") pod "efa8eaec-19fb-43da-a1f3-557b0847e966" (UID: "efa8eaec-19fb-43da-a1f3-557b0847e966"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.794626 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/367a49cf-488a-4852-8728-78dacbfbd500-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.794891 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.794921 4765 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.794956 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.794972 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/efa8eaec-19fb-43da-a1f3-557b0847e966-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.794983 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcrqn\" (UniqueName: \"kubernetes.io/projected/3dbd0d4a-660a-4887-83ae-25c00f54196a-kube-api-access-lcrqn\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.794998 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7jlp\" (UniqueName: \"kubernetes.io/projected/b1099ee9-e4d7-496f-b35e-7617ee456898-kube-api-access-j7jlp\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.795008 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.795019 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fncrb\" (UniqueName: \"kubernetes.io/projected/efa8eaec-19fb-43da-a1f3-557b0847e966-kube-api-access-fncrb\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.795066 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.795105 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1099ee9-e4d7-496f-b35e-7617ee456898-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.795121 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.795134 4765 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 
07:12:51.795148 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dbd0d4a-660a-4887-83ae-25c00f54196a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.805076 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "efa8eaec-19fb-43da-a1f3-557b0847e966" (UID: "efa8eaec-19fb-43da-a1f3-557b0847e966"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.813870 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b1099ee9-e4d7-496f-b35e-7617ee456898" (UID: "b1099ee9-e4d7-496f-b35e-7617ee456898"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.817520 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-config-data" (OuterVolumeSpecName: "config-data") pod "b1099ee9-e4d7-496f-b35e-7617ee456898" (UID: "b1099ee9-e4d7-496f-b35e-7617ee456898"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.824817 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.827418 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.830170 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.830197 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "efa8eaec-19fb-43da-a1f3-557b0847e966" (UID: "efa8eaec-19fb-43da-a1f3-557b0847e966"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:51 crc kubenswrapper[4765]: E1210 07:12:51.830228 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="4b62b966-7b0a-4099-977c-44682f703187" containerName="ovn-northd" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.839824 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.897066 4765 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.897118 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.897252 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa8eaec-19fb-43da-a1f3-557b0847e966-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.897264 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:51 crc kubenswrapper[4765]: I1210 07:12:51.897278 4765 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1099ee9-e4d7-496f-b35e-7617ee456898-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: E1210 07:12:52.017430 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:12:52 crc kubenswrapper[4765]: E1210 07:12:52.017644 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.017751 4765 scope.go:117] "RemoveContainer" containerID="80d3af87b8b64fb1621fe28544a27dbb97a6487a3d0bf2b741bfbc88ebf45779" Dec 10 07:12:52 crc kubenswrapper[4765]: E1210 07:12:52.018843 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:12:52 crc kubenswrapper[4765]: E1210 
07:12:52.019799 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:12:52 crc kubenswrapper[4765]: E1210 07:12:52.019834 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" Dec 10 07:12:52 crc kubenswrapper[4765]: E1210 07:12:52.020500 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:12:52 crc kubenswrapper[4765]: E1210 07:12:52.024343 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:12:52 crc kubenswrapper[4765]: E1210 07:12:52.024461 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovs-vswitchd" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.026207 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.094567 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.095449 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi1a4f-account-delete-d276m" Dec 10 07:12:52 crc kubenswrapper[4765]: E1210 07:12:52.114737 4765 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 10 07:12:52 crc kubenswrapper[4765]: E1210 07:12:52.114823 4765 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data podName:7d035962-836c-48cf-8ea4-a3e5a23f58f9 nodeName:}" failed. No retries permitted until 2025-12-10 07:13:00.114795983 +0000 UTC m=+1499.841461299 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data") pod "rabbitmq-cell1-server-0" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9") : configmap "rabbitmq-cell1-config-data" not found Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.215582 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-logs\") pod \"48199101-c7d2-4881-98bd-53d14d7308d5\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.215950 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78bfd\" (UniqueName: \"kubernetes.io/projected/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-kube-api-access-78bfd\") pod \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.215972 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bnpt5\" (UniqueName: \"kubernetes.io/projected/f429f6df-3510-4c6a-b9e8-062895218832-kube-api-access-bnpt5\") pod \"f429f6df-3510-4c6a-b9e8-062895218832\" (UID: \"f429f6df-3510-4c6a-b9e8-062895218832\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.216073 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-combined-ca-bundle\") pod \"48199101-c7d2-4881-98bd-53d14d7308d5\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.216153 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-internal-tls-certs\") pod \"48199101-c7d2-4881-98bd-53d14d7308d5\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.216213 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-httpd-run\") pod \"48199101-c7d2-4881-98bd-53d14d7308d5\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.216231 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-combined-ca-bundle\") pod \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.216251 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f429f6df-3510-4c6a-b9e8-062895218832-operator-scripts\") pod \"f429f6df-3510-4c6a-b9e8-062895218832\" (UID: \"f429f6df-3510-4c6a-b9e8-062895218832\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.216289 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-scripts\") pod \"48199101-c7d2-4881-98bd-53d14d7308d5\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.216310 4765 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-config-data\") pod \"48199101-c7d2-4881-98bd-53d14d7308d5\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.216352 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z787f\" (UniqueName: \"kubernetes.io/projected/48199101-c7d2-4881-98bd-53d14d7308d5-kube-api-access-z787f\") pod \"48199101-c7d2-4881-98bd-53d14d7308d5\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.216375 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"48199101-c7d2-4881-98bd-53d14d7308d5\" (UID: \"48199101-c7d2-4881-98bd-53d14d7308d5\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.216410 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-config-data\") pod \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\" (UID: \"d11b24c2-0ac0-4f23-a575-d1b80db4ba11\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.218061 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "48199101-c7d2-4881-98bd-53d14d7308d5" (UID: "48199101-c7d2-4881-98bd-53d14d7308d5"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.220976 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-logs" (OuterVolumeSpecName: "logs") pod "48199101-c7d2-4881-98bd-53d14d7308d5" (UID: "48199101-c7d2-4881-98bd-53d14d7308d5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.227740 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f429f6df-3510-4c6a-b9e8-062895218832-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f429f6df-3510-4c6a-b9e8-062895218832" (UID: "f429f6df-3510-4c6a-b9e8-062895218832"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.230238 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "48199101-c7d2-4881-98bd-53d14d7308d5" (UID: "48199101-c7d2-4881-98bd-53d14d7308d5"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.233031 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f429f6df-3510-4c6a-b9e8-062895218832-kube-api-access-bnpt5" (OuterVolumeSpecName: "kube-api-access-bnpt5") pod "f429f6df-3510-4c6a-b9e8-062895218832" (UID: "f429f6df-3510-4c6a-b9e8-062895218832"). InnerVolumeSpecName "kube-api-access-bnpt5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.240912 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-scripts" (OuterVolumeSpecName: "scripts") pod "48199101-c7d2-4881-98bd-53d14d7308d5" (UID: "48199101-c7d2-4881-98bd-53d14d7308d5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.241100 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48199101-c7d2-4881-98bd-53d14d7308d5-kube-api-access-z787f" (OuterVolumeSpecName: "kube-api-access-z787f") pod "48199101-c7d2-4881-98bd-53d14d7308d5" (UID: "48199101-c7d2-4881-98bd-53d14d7308d5"). InnerVolumeSpecName "kube-api-access-z787f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.242293 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-kube-api-access-78bfd" (OuterVolumeSpecName: "kube-api-access-78bfd") pod "d11b24c2-0ac0-4f23-a575-d1b80db4ba11" (UID: "d11b24c2-0ac0-4f23-a575-d1b80db4ba11"). InnerVolumeSpecName "kube-api-access-78bfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.257809 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-config-data" (OuterVolumeSpecName: "config-data") pod "d11b24c2-0ac0-4f23-a575-d1b80db4ba11" (UID: "d11b24c2-0ac0-4f23-a575-d1b80db4ba11"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.268556 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.271451 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48199101-c7d2-4881-98bd-53d14d7308d5" (UID: "48199101-c7d2-4881-98bd-53d14d7308d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.275310 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d11b24c2-0ac0-4f23-a575-d1b80db4ba11" (UID: "d11b24c2-0ac0-4f23-a575-d1b80db4ba11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.278309 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron3778-account-delete-5p76n" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.278345 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell092ce-account-delete-r87ss" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.283641 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "48199101-c7d2-4881-98bd-53d14d7308d5" (UID: "48199101-c7d2-4881-98bd-53d14d7308d5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.283809 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-config-data" (OuterVolumeSpecName: "config-data") pod "48199101-c7d2-4881-98bd-53d14d7308d5" (UID: "48199101-c7d2-4881-98bd-53d14d7308d5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.291773 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder1abb-account-delete-h8mp2" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.295627 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican43b0-account-delete-tm6mc" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319662 4765 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319700 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319713 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f429f6df-3510-4c6a-b9e8-062895218832-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319727 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319738 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319750 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z787f\" (UniqueName: \"kubernetes.io/projected/48199101-c7d2-4881-98bd-53d14d7308d5-kube-api-access-z787f\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319779 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319790 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319801 4765 reconciler_common.go:293] "Volume 
detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48199101-c7d2-4881-98bd-53d14d7308d5-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319812 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78bfd\" (UniqueName: \"kubernetes.io/projected/d11b24c2-0ac0-4f23-a575-d1b80db4ba11-kube-api-access-78bfd\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319826 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bnpt5\" (UniqueName: \"kubernetes.io/projected/f429f6df-3510-4c6a-b9e8-062895218832-kube-api-access-bnpt5\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319837 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.319848 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/48199101-c7d2-4881-98bd-53d14d7308d5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.347653 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.348700 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6a5f12df-98c4-4ab8-af81-e1b8f2067fcc","Type":"ContainerDied","Data":"12468ee67d4418f28062a3254399c17eb47a78c5eedd56e8ee6011bccb953549"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.348830 4765 scope.go:117] "RemoveContainer" containerID="49d173bcd82030e3728295fd26cf9ba46fd74b265743bf325402dc0c5ce12e34" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.349007 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.362882 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"48199101-c7d2-4881-98bd-53d14d7308d5","Type":"ContainerDied","Data":"381b0f39c40da82fd302ed8b05b7fb0835e8579bf2ab1f78541c95f23b62aade"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.362960 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.389209 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican43b0-account-delete-tm6mc" event={"ID":"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4","Type":"ContainerDied","Data":"199b129a31c27fc5188f99fc68d6303ddc285b789dbe2af736f16ac465af972f"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.389278 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="199b129a31c27fc5188f99fc68d6303ddc285b789dbe2af736f16ac465af972f" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.389368 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican43b0-account-delete-tm6mc" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.394160 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"655c5f62-880e-4e05-9db8-da19844facdf","Type":"ContainerDied","Data":"5ef7aedd74056f10d17cbb5e760a66ab97a01ecc4c8d1a7ab568a61774158e45"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.394489 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.395402 4765 scope.go:117] "RemoveContainer" containerID="f707edae2a556f90532dcf66301a00a51c4aade0f025e60306c8ac207c349e6b" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.406782 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron3778-account-delete-5p76n" event={"ID":"1bbedff0-5b89-4bbb-b308-6ccb13c8216c","Type":"ContainerDied","Data":"46f2ad887ea199ee0b460ae4c0b62ff3105c158f8f030b5f8d87d7ce3a8cf87a"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.406835 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46f2ad887ea199ee0b460ae4c0b62ff3105c158f8f030b5f8d87d7ce3a8cf87a" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.406973 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron3778-account-delete-5p76n" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.417909 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.417957 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glanceb1c8-account-delete-fghcc" event={"ID":"3dbd0d4a-660a-4887-83ae-25c00f54196a","Type":"ContainerDied","Data":"1f0fda5394f1c52fd966b0eeaa38ca5acb14870e459355a8e1f4ea2b3771da73"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.417990 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f0fda5394f1c52fd966b0eeaa38ca5acb14870e459355a8e1f4ea2b3771da73" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.418028 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glanceb1c8-account-delete-fghcc" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421036 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-memcached-tls-certs\") pod \"655c5f62-880e-4e05-9db8-da19844facdf\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421154 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-config-data\") pod \"655c5f62-880e-4e05-9db8-da19844facdf\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421191 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpz57\" (UniqueName: \"kubernetes.io/projected/1fec9b72-da40-4d40-9f56-5eca02badaba-kube-api-access-hpz57\") pod \"1fec9b72-da40-4d40-9f56-5eca02badaba\" (UID: \"1fec9b72-da40-4d40-9f56-5eca02badaba\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421251 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-operator-scripts\") pod \"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4\" (UID: \"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421344 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-operator-scripts\") pod \"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc\" (UID: \"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421381 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8trm\" (UniqueName: \"kubernetes.io/projected/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-kube-api-access-f8trm\") pod \"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc\" (UID: \"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421409 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgmdz\" (UniqueName: \"kubernetes.io/projected/655c5f62-880e-4e05-9db8-da19844facdf-kube-api-access-bgmdz\") pod \"655c5f62-880e-4e05-9db8-da19844facdf\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421474 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-combined-ca-bundle\") pod \"655c5f62-880e-4e05-9db8-da19844facdf\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421506 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fec9b72-da40-4d40-9f56-5eca02badaba-operator-scripts\") pod \"1fec9b72-da40-4d40-9f56-5eca02badaba\" (UID: \"1fec9b72-da40-4d40-9f56-5eca02badaba\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421531 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzdfj\" (UniqueName: 
\"kubernetes.io/projected/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-kube-api-access-kzdfj\") pod \"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4\" (UID: \"ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421569 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-kolla-config\") pod \"655c5f62-880e-4e05-9db8-da19844facdf\" (UID: \"655c5f62-880e-4e05-9db8-da19844facdf\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421627 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cqdd\" (UniqueName: \"kubernetes.io/projected/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-kube-api-access-6cqdd\") pod \"1bbedff0-5b89-4bbb-b308-6ccb13c8216c\" (UID: \"1bbedff0-5b89-4bbb-b308-6ccb13c8216c\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.421686 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-operator-scripts\") pod \"1bbedff0-5b89-4bbb-b308-6ccb13c8216c\" (UID: \"1bbedff0-5b89-4bbb-b308-6ccb13c8216c\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.423664 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.424975 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1bbedff0-5b89-4bbb-b308-6ccb13c8216c" (UID: "1bbedff0-5b89-4bbb-b308-6ccb13c8216c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.425028 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-config-data" (OuterVolumeSpecName: "config-data") pod "655c5f62-880e-4e05-9db8-da19844facdf" (UID: "655c5f62-880e-4e05-9db8-da19844facdf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.425635 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "655c5f62-880e-4e05-9db8-da19844facdf" (UID: "655c5f62-880e-4e05-9db8-da19844facdf"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.426079 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b5b765ff-9a5a-48b8-80c7-e8bd56613fcc" (UID: "b5b765ff-9a5a-48b8-80c7-e8bd56613fcc"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.431877 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fec9b72-da40-4d40-9f56-5eca02badaba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1fec9b72-da40-4d40-9f56-5eca02badaba" (UID: "1fec9b72-da40-4d40-9f56-5eca02badaba"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.432372 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4" (UID: "ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.436951 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-kube-api-access-f8trm" (OuterVolumeSpecName: "kube-api-access-f8trm") pod "b5b765ff-9a5a-48b8-80c7-e8bd56613fcc" (UID: "b5b765ff-9a5a-48b8-80c7-e8bd56613fcc"). InnerVolumeSpecName "kube-api-access-f8trm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.437148 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.440927 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-kube-api-access-kzdfj" (OuterVolumeSpecName: "kube-api-access-kzdfj") pod "ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4" (UID: "ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4"). InnerVolumeSpecName "kube-api-access-kzdfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.441027 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fec9b72-da40-4d40-9f56-5eca02badaba-kube-api-access-hpz57" (OuterVolumeSpecName: "kube-api-access-hpz57") pod "1fec9b72-da40-4d40-9f56-5eca02badaba" (UID: "1fec9b72-da40-4d40-9f56-5eca02badaba"). InnerVolumeSpecName "kube-api-access-hpz57". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.441769 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/655c5f62-880e-4e05-9db8-da19844facdf-kube-api-access-bgmdz" (OuterVolumeSpecName: "kube-api-access-bgmdz") pod "655c5f62-880e-4e05-9db8-da19844facdf" (UID: "655c5f62-880e-4e05-9db8-da19844facdf"). InnerVolumeSpecName "kube-api-access-bgmdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.447021 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-kube-api-access-6cqdd" (OuterVolumeSpecName: "kube-api-access-6cqdd") pod "1bbedff0-5b89-4bbb-b308-6ccb13c8216c" (UID: "1bbedff0-5b89-4bbb-b308-6ccb13c8216c"). InnerVolumeSpecName "kube-api-access-6cqdd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.463536 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.472416 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "655c5f62-880e-4e05-9db8-da19844facdf" (UID: "655c5f62-880e-4e05-9db8-da19844facdf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.479903 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1099ee9-e4d7-496f-b35e-7617ee456898","Type":"ContainerDied","Data":"4bca801116da3647c162bffb7d23aa64d681ee0510e8f7af5ae07cbe245a43e4"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.479947 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.483356 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.506917 4765 scope.go:117] "RemoveContainer" containerID="e51ededa81fc983915ee20952321442588bb91e3bed29e48234b289ecdd3cfdc" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.508664 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_4b62b966-7b0a-4099-977c-44682f703187/ovn-northd/0.log" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.508708 4765 generic.go:334] "Generic (PLEG): container finished" podID="4b62b966-7b0a-4099-977c-44682f703187" containerID="07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c" exitCode=139 Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.508824 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4b62b966-7b0a-4099-977c-44682f703187","Type":"ContainerDied","Data":"07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.514408 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "655c5f62-880e-4e05-9db8-da19844facdf" (UID: "655c5f62-880e-4e05-9db8-da19844facdf"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.517771 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glanceb1c8-account-delete-fghcc"] Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.526770 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.527579 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8trm\" (UniqueName: \"kubernetes.io/projected/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc-kube-api-access-f8trm\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.527753 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgmdz\" (UniqueName: \"kubernetes.io/projected/655c5f62-880e-4e05-9db8-da19844facdf-kube-api-access-bgmdz\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.527827 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.527937 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fec9b72-da40-4d40-9f56-5eca02badaba-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.528015 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzdfj\" (UniqueName: \"kubernetes.io/projected/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-kube-api-access-kzdfj\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.528105 4765 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.528178 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cqdd\" (UniqueName: \"kubernetes.io/projected/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-kube-api-access-6cqdd\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.528257 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bbedff0-5b89-4bbb-b308-6ccb13c8216c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.528342 4765 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/655c5f62-880e-4e05-9db8-da19844facdf-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.528410 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/655c5f62-880e-4e05-9db8-da19844facdf-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.528481 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpz57\" (UniqueName: \"kubernetes.io/projected/1fec9b72-da40-4d40-9f56-5eca02badaba-kube-api-access-hpz57\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc 
kubenswrapper[4765]: I1210 07:12:52.528562 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.528748 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d11b24c2-0ac0-4f23-a575-d1b80db4ba11","Type":"ContainerDied","Data":"25d20b42efcbb9318336c3e6f68eddac50d9caae7887edef1e9bf6045ca41622"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.528916 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.533746 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell092ce-account-delete-r87ss" event={"ID":"b5b765ff-9a5a-48b8-80c7-e8bd56613fcc","Type":"ContainerDied","Data":"a905958259d944135a9edcf564287cdcc035dce37230c5ffca81eca103511a25"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.534005 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell092ce-account-delete-r87ss" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.535039 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glanceb1c8-account-delete-fghcc"] Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.554606 4765 scope.go:117] "RemoveContainer" containerID="b362cb9831e6868cc5bfea4106470f04254c397b3cd98c304dac462e618c6408" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.580879 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.586010 4765 generic.go:334] "Generic (PLEG): container finished" podID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" containerID="60f858d2d85b01e9da2a6a95ed2be831935e234ad3033222a4291f2052f6ce52" exitCode=0 Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.586111 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"78b416b3-3796-4fa3-8a4f-7fa6107d98a1","Type":"ContainerDied","Data":"60f858d2d85b01e9da2a6a95ed2be831935e234ad3033222a4291f2052f6ce52"} Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.603890 4765 scope.go:117] "RemoveContainer" containerID="b203f59bc5f85514c464b901df4f284e9f82d4b70b43ff25aa23de50dd64c527" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.604465 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.611652 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="202a617e-eb55-4702-8958-3502b6d8e91b" path="/var/lib/kubelet/pods/202a617e-eb55-4702-8958-3502b6d8e91b/volumes" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.612930 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dbd0d4a-660a-4887-83ae-25c00f54196a" path="/var/lib/kubelet/pods/3dbd0d4a-660a-4887-83ae-25c00f54196a/volumes" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.613856 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48199101-c7d2-4881-98bd-53d14d7308d5" path="/var/lib/kubelet/pods/48199101-c7d2-4881-98bd-53d14d7308d5/volumes" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.615179 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" path="/var/lib/kubelet/pods/5cbf2f96-d196-413b-841a-9b753e6beae2/volumes" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.615929 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" path="/var/lib/kubelet/pods/6a5f12df-98c4-4ab8-af81-e1b8f2067fcc/volumes" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.617358 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" path="/var/lib/kubelet/pods/a99712e9-cab7-452c-9df1-d94b5c4d96af/volumes" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.618427 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6c300c8-c9c7-40c3-8874-236b21eb4856" path="/var/lib/kubelet/pods/e6c300c8-c9c7-40c3-8874-236b21eb4856/volumes" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.638072 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csxp5\" (UniqueName: \"kubernetes.io/projected/949ef1d3-9f74-4052-a482-9fea4e48d374-kube-api-access-csxp5\") pod \"949ef1d3-9f74-4052-a482-9fea4e48d374\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.638207 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-default\") pod \"949ef1d3-9f74-4052-a482-9fea4e48d374\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.638268 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-kolla-config\") pod \"949ef1d3-9f74-4052-a482-9fea4e48d374\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.638386 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-galera-tls-certs\") pod \"949ef1d3-9f74-4052-a482-9fea4e48d374\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.638438 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-generated\") pod 
\"949ef1d3-9f74-4052-a482-9fea4e48d374\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.638480 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-operator-scripts\") pod \"949ef1d3-9f74-4052-a482-9fea4e48d374\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.638522 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"949ef1d3-9f74-4052-a482-9fea4e48d374\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.638544 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-combined-ca-bundle\") pod \"949ef1d3-9f74-4052-a482-9fea4e48d374\" (UID: \"949ef1d3-9f74-4052-a482-9fea4e48d374\") " Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.641681 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "949ef1d3-9f74-4052-a482-9fea4e48d374" (UID: "949ef1d3-9f74-4052-a482-9fea4e48d374"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.641750 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "949ef1d3-9f74-4052-a482-9fea4e48d374" (UID: "949ef1d3-9f74-4052-a482-9fea4e48d374"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.642833 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "949ef1d3-9f74-4052-a482-9fea4e48d374" (UID: "949ef1d3-9f74-4052-a482-9fea4e48d374"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.645358 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/949ef1d3-9f74-4052-a482-9fea4e48d374-kube-api-access-csxp5" (OuterVolumeSpecName: "kube-api-access-csxp5") pod "949ef1d3-9f74-4052-a482-9fea4e48d374" (UID: "949ef1d3-9f74-4052-a482-9fea4e48d374"). InnerVolumeSpecName "kube-api-access-csxp5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.645663 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "949ef1d3-9f74-4052-a482-9fea4e48d374" (UID: "949ef1d3-9f74-4052-a482-9fea4e48d374"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.647685 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.666390 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi1a4f-account-delete-d276m"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.678825 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "949ef1d3-9f74-4052-a482-9fea4e48d374" (UID: "949ef1d3-9f74-4052-a482-9fea4e48d374"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.695258 4765 generic.go:334] "Generic (PLEG): container finished" podID="949ef1d3-9f74-4052-a482-9fea4e48d374" containerID="df8d58bedf5e00ee650bbe91fcb03935b51f76f914be11d02374897423dbf609" exitCode=0
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.696438 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.699611 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder1abb-account-delete-h8mp2"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.713875 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "949ef1d3-9f74-4052-a482-9fea4e48d374" (UID: "949ef1d3-9f74-4052-a482-9fea4e48d374"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.726673 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "949ef1d3-9f74-4052-a482-9fea4e48d374" (UID: "949ef1d3-9f74-4052-a482-9fea4e48d374"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.744159 4765 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.744261 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.744277 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.744290 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csxp5\" (UniqueName: \"kubernetes.io/projected/949ef1d3-9f74-4052-a482-9fea4e48d374-kube-api-access-csxp5\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.744305 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-default\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.744318 4765 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/949ef1d3-9f74-4052-a482-9fea4e48d374-kolla-config\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.744349 4765 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/949ef1d3-9f74-4052-a482-9fea4e48d374-galera-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.744361 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/949ef1d3-9f74-4052-a482-9fea4e48d374-config-data-generated\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.772062 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794330 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7756b4f44b-9575x" event={"ID":"367a49cf-488a-4852-8728-78dacbfbd500","Type":"ContainerDied","Data":"d25984e152e6eb632f787bf2b18ebb3a4a22ad06817f65a6d37d26b1554ed376"}
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794373 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794398 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794439 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell092ce-account-delete-r87ss"]
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794452 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell092ce-account-delete-r87ss"]
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794470 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"efa8eaec-19fb-43da-a1f3-557b0847e966","Type":"ContainerDied","Data":"e622eed2da2518c15d0f34c6464c0c6813f78d3244a0c9d6859617bb6b5c9a8a"}
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794495 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi1a4f-account-delete-d276m" event={"ID":"f429f6df-3510-4c6a-b9e8-062895218832","Type":"ContainerDied","Data":"516aec2074d0d4765cac6d3b8311aa558452b2d93db2a6a499432d415d50e077"}
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794514 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="516aec2074d0d4765cac6d3b8311aa558452b2d93db2a6a499432d415d50e077"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794529 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"949ef1d3-9f74-4052-a482-9fea4e48d374","Type":"ContainerDied","Data":"df8d58bedf5e00ee650bbe91fcb03935b51f76f914be11d02374897423dbf609"}
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794544 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder1abb-account-delete-h8mp2" event={"ID":"1fec9b72-da40-4d40-9f56-5eca02badaba","Type":"ContainerDied","Data":"29d018ef434d38b31d6d6d5d4f43440bc92ef5dfc063b44f41213dc8d02084d5"}
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.794558 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29d018ef434d38b31d6d6d5d4f43440bc92ef5dfc063b44f41213dc8d02084d5"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.810449 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/keystone-586f9fc866-46mgw" podUID="857ecb8b-72dc-40a7-a407-d85c40c40bcf" containerName="keystone-api" probeResult="failure" output="Get \"https://10.217.0.154:5000/v3\": read tcp 10.217.0.2:43678->10.217.0.154:5000: read: connection reset by peer"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.813178 4765 scope.go:117] "RemoveContainer" containerID="90ca341c2978c3ac47bd6f8955762450564583c1ccb4813fdb2ffc303391ba52"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.819928 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_4b62b966-7b0a-4099-977c-44682f703187/ovn-northd/0.log"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.820022 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.845063 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-metrics-certs-tls-certs\") pod \"4b62b966-7b0a-4099-977c-44682f703187\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") "
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.845235 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-ovn-northd-tls-certs\") pod \"4b62b966-7b0a-4099-977c-44682f703187\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") "
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.845310 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4b62b966-7b0a-4099-977c-44682f703187-ovn-rundir\") pod \"4b62b966-7b0a-4099-977c-44682f703187\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") "
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.845365 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkwgp\" (UniqueName: \"kubernetes.io/projected/4b62b966-7b0a-4099-977c-44682f703187-kube-api-access-bkwgp\") pod \"4b62b966-7b0a-4099-977c-44682f703187\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") "
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.845392 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-config\") pod \"4b62b966-7b0a-4099-977c-44682f703187\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") "
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.845414 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-combined-ca-bundle\") pod \"4b62b966-7b0a-4099-977c-44682f703187\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") "
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.845474 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-scripts\") pod \"4b62b966-7b0a-4099-977c-44682f703187\" (UID: \"4b62b966-7b0a-4099-977c-44682f703187\") "
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.845859 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.846641 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b62b966-7b0a-4099-977c-44682f703187-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "4b62b966-7b0a-4099-977c-44682f703187" (UID: "4b62b966-7b0a-4099-977c-44682f703187"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.846902 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-config" (OuterVolumeSpecName: "config") pod "4b62b966-7b0a-4099-977c-44682f703187" (UID: "4b62b966-7b0a-4099-977c-44682f703187"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.847853 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-scripts" (OuterVolumeSpecName: "scripts") pod "4b62b966-7b0a-4099-977c-44682f703187" (UID: "4b62b966-7b0a-4099-977c-44682f703187"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.854768 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b62b966-7b0a-4099-977c-44682f703187-kube-api-access-bkwgp" (OuterVolumeSpecName: "kube-api-access-bkwgp") pod "4b62b966-7b0a-4099-977c-44682f703187" (UID: "4b62b966-7b0a-4099-977c-44682f703187"). InnerVolumeSpecName "kube-api-access-bkwgp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.882682 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b62b966-7b0a-4099-977c-44682f703187" (UID: "4b62b966-7b0a-4099-977c-44682f703187"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.912550 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "4b62b966-7b0a-4099-977c-44682f703187" (UID: "4b62b966-7b0a-4099-977c-44682f703187"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.912572 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "4b62b966-7b0a-4099-977c-44682f703187" (UID: "4b62b966-7b0a-4099-977c-44682f703187"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.947484 4765 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4b62b966-7b0a-4099-977c-44682f703187-ovn-rundir\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.947535 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkwgp\" (UniqueName: \"kubernetes.io/projected/4b62b966-7b0a-4099-977c-44682f703187-kube-api-access-bkwgp\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.947547 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-config\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.947561 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.947571 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b62b966-7b0a-4099-977c-44682f703187-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.947583 4765 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.947594 4765 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b62b966-7b0a-4099-977c-44682f703187-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.967960 4765 scope.go:117] "RemoveContainer" containerID="532e6ef7ce06ebf94ad58b81fb2331379d4d7d0f9d8d702b8370e40c41c43459"
Dec 10 07:12:52 crc kubenswrapper[4765]: I1210 07:12:52.976482 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.036429 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-7756b4f44b-9575x"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.036897 4765 scope.go:117] "RemoveContainer" containerID="48722ecb98b55b709c43a7cab76c40a65934728cb7609e6246ba369957cdbe37"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048297 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-erlang-cookie-secret\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048350 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-server-conf\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048384 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048411 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2l6j\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-kube-api-access-j2l6j\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048454 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-confd\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048501 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-plugins-conf\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048564 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-tls\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048652 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048727 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-erlang-cookie\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048777 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-plugins\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.048803 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-pod-info\") pod \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\" (UID: \"78b416b3-3796-4fa3-8a4f-7fa6107d98a1\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.049795 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.049827 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.050328 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.052841 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.054061 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-pod-info" (OuterVolumeSpecName: "pod-info") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.055041 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-kube-api-access-j2l6j" (OuterVolumeSpecName: "kube-api-access-j2l6j") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "kube-api-access-j2l6j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.055476 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.058746 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "persistence") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.069791 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-7756b4f44b-9575x"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.080582 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.080959 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data" (OuterVolumeSpecName: "config-data") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.097694 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.104012 4765 scope.go:117] "RemoveContainer" containerID="7f551c1327d32f1311b5f2415da3d67dc653cf1705ba6dbc430aac97617ada56"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.118603 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.125414 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.131956 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-server-conf" (OuterVolumeSpecName: "server-conf") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.137174 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.144210 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.150208 4765 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.150242 4765 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.150251 4765 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-pod-info\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.150259 4765 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.150267 4765 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-server-conf\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.150274 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.150283 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2l6j\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-kube-api-access-j2l6j\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.150319 4765 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.150327 4765 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.150351 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.152174 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.152715 4765 scope.go:117] "RemoveContainer" containerID="db7c3938391cd43e268448d1b8ffd385e79c3741b66633044668f1d83c8facc4"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.166866 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.175151 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.185943 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi1a4f-account-delete-d276m"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.195298 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapi1a4f-account-delete-d276m"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.197525 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "78b416b3-3796-4fa3-8a4f-7fa6107d98a1" (UID: "78b416b3-3796-4fa3-8a4f-7fa6107d98a1"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.229775 4765 scope.go:117] "RemoveContainer" containerID="6904c7a18870c99e59c094d428f4fb9aa0a7611afe5301700842c547c5f6ff4a"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.251308 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.251345 4765 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/78b416b3-3796-4fa3-8a4f-7fa6107d98a1-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.265143 4765 scope.go:117] "RemoveContainer" containerID="73d0e7ff7a431b33ef59f1209e48d0aa7cb254124be09d9a85a9829d0d6c40a9"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.294122 4765 scope.go:117] "RemoveContainer" containerID="735741e28bbecc21170b1c3726ecc32ba7fee3884af169977fd59b27fb29dbe6"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.335591 4765 scope.go:117] "RemoveContainer" containerID="df8d58bedf5e00ee650bbe91fcb03935b51f76f914be11d02374897423dbf609"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.399355 4765 scope.go:117] "RemoveContainer" containerID="b8ad925fd038c364369863dffec329b548cd977bfa87db7419f585e9b7676be4"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.452698 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-586f9fc866-46mgw"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.456604 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lp226\" (UniqueName: \"kubernetes.io/projected/857ecb8b-72dc-40a7-a407-d85c40c40bcf-kube-api-access-lp226\") pod \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.456676 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-fernet-keys\") pod \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.456723 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-config-data\") pod \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.456746 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-public-tls-certs\") pod \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.456780 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-combined-ca-bundle\") pod \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.456914 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-internal-tls-certs\") pod \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.456944 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-scripts\") pod \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.456966 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-credential-keys\") pod \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\" (UID: \"857ecb8b-72dc-40a7-a407-d85c40c40bcf\") "
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.461413 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "857ecb8b-72dc-40a7-a407-d85c40c40bcf" (UID: "857ecb8b-72dc-40a7-a407-d85c40c40bcf"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.465290 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/857ecb8b-72dc-40a7-a407-d85c40c40bcf-kube-api-access-lp226" (OuterVolumeSpecName: "kube-api-access-lp226") pod "857ecb8b-72dc-40a7-a407-d85c40c40bcf" (UID: "857ecb8b-72dc-40a7-a407-d85c40c40bcf"). InnerVolumeSpecName "kube-api-access-lp226". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.474246 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-scripts" (OuterVolumeSpecName: "scripts") pod "857ecb8b-72dc-40a7-a407-d85c40c40bcf" (UID: "857ecb8b-72dc-40a7-a407-d85c40c40bcf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.478963 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "857ecb8b-72dc-40a7-a407-d85c40c40bcf" (UID: "857ecb8b-72dc-40a7-a407-d85c40c40bcf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.529318 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-config-data" (OuterVolumeSpecName: "config-data") pod "857ecb8b-72dc-40a7-a407-d85c40c40bcf" (UID: "857ecb8b-72dc-40a7-a407-d85c40c40bcf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.539451 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "857ecb8b-72dc-40a7-a407-d85c40c40bcf" (UID: "857ecb8b-72dc-40a7-a407-d85c40c40bcf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.540886 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "857ecb8b-72dc-40a7-a407-d85c40c40bcf" (UID: "857ecb8b-72dc-40a7-a407-d85c40c40bcf"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.554505 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "857ecb8b-72dc-40a7-a407-d85c40c40bcf" (UID: "857ecb8b-72dc-40a7-a407-d85c40c40bcf"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.559725 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lp226\" (UniqueName: \"kubernetes.io/projected/857ecb8b-72dc-40a7-a407-d85c40c40bcf-kube-api-access-lp226\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.559763 4765 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.559775 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.559784 4765 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.559792 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.559800 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.559809 4765 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.559817 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/857ecb8b-72dc-40a7-a407-d85c40c40bcf-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.582072 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-jvjbd"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.606628 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-jvjbd"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.609614 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-bd79-account-create-update-zdk9x"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.640262 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placementbd79-account-delete-fm679"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.659627 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placementbd79-account-delete-fm679"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.673715 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-bd79-account-create-update-zdk9x"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.678047 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-nwv46"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.686973 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-nwv46"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.696897 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron3778-account-delete-5p76n"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.729713 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron3778-account-delete-5p76n"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.743686 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_4b62b966-7b0a-4099-977c-44682f703187/ovn-northd/0.log"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.743775 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4b62b966-7b0a-4099-977c-44682f703187","Type":"ContainerDied","Data":"2f22b42012c8f998a234b3a4cfeb7bf6c49da45ba398308a6f3d43549b1d9fd6"}
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.743819 4765 scope.go:117] "RemoveContainer" containerID="85b3d34a3b05018017b81c0696d2b13e8bc79227f19e89ee345bdd1bfbe284c6"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.743968 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.764528 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-3778-account-create-update-78648"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.778196 4765 generic.go:334] "Generic (PLEG): container finished" podID="857ecb8b-72dc-40a7-a407-d85c40c40bcf" containerID="97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752" exitCode=0
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.778281 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-586f9fc866-46mgw" event={"ID":"857ecb8b-72dc-40a7-a407-d85c40c40bcf","Type":"ContainerDied","Data":"97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752"}
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.778313 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-586f9fc866-46mgw" event={"ID":"857ecb8b-72dc-40a7-a407-d85c40c40bcf","Type":"ContainerDied","Data":"4c350804e5a686bb826e970e780fb941f9e301cf2128273ca21fedd769bb5ee6"}
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.778385 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-586f9fc866-46mgw"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.781825 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-3778-account-create-update-78648"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.798759 4765 generic.go:334] "Generic (PLEG): container finished" podID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" containerID="f54cf327f09ae7940909c6415da47f057333ee5a45036d48b02fd4c6fd91cb2d" exitCode=0
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.798855 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7d035962-836c-48cf-8ea4-a3e5a23f58f9","Type":"ContainerDied","Data":"f54cf327f09ae7940909c6415da47f057333ee5a45036d48b02fd4c6fd91cb2d"}
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.822391 4765 scope.go:117] "RemoveContainer" containerID="07e96d7bfb948445f7eac9ed07260fdbff76c9bd60d8b7015a2929ae7b5bec3c"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.845789 4765 generic.go:334] "Generic (PLEG): container finished" podID="865e0a69-0d85-4d93-9d38-f52449d09d87" containerID="cb7c75803c1530a9151f1c8f58e20ea86e4933c86c600aeb7834d5e89e66efe6" exitCode=0
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.845900 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66fcc55b75-8hcl6" event={"ID":"865e0a69-0d85-4d93-9d38-f52449d09d87","Type":"ContainerDied","Data":"cb7c75803c1530a9151f1c8f58e20ea86e4933c86c600aeb7834d5e89e66efe6"}
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.852897 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-8zzkf"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.857107 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"78b416b3-3796-4fa3-8a4f-7fa6107d98a1","Type":"ContainerDied","Data":"598c1f7141a2b8fd523fccafc577be90babd1257b3415246bc8317d5bd5122df"}
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.857370 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.862361 4765 scope.go:117] "RemoveContainer" containerID="97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752"
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.870890 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-8zzkf"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.882968 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican43b0-account-delete-tm6mc"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.887097 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-43b0-account-create-update-29n95"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.897044 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-43b0-account-create-update-29n95"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.913510 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican43b0-account-delete-tm6mc"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.928246 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-586f9fc866-46mgw"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.939713 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-586f9fc866-46mgw"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.956159 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.962953 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.969839 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-8fh8t"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.979674 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-8fh8t"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.985882 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder1abb-account-delete-h8mp2"]
Dec 10 07:12:53 crc kubenswrapper[4765]: I1210 07:12:53.996552 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-1abb-account-create-update-2dn48"]
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.007345 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-1abb-account-create-update-2dn48"]
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.017905 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder1abb-account-delete-h8mp2"]
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.041188 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.053388 4765 scope.go:117] "RemoveContainer" containerID="97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752"
Dec 10 07:12:54 crc kubenswrapper[4765]: E1210 07:12:54.054462 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752\": container with ID starting with 97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752 not found: ID does not exist" containerID="97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752"
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.054521 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752"} err="failed to get container status \"97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752\": rpc error: code = NotFound desc = could not find container \"97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752\": container with ID starting with 97e9fc4165ac8704c4c0452ae52b6847fb82f3a8bea1c0cd166df2f7ea247752 not found: ID does not exist"
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.054559 4765 scope.go:117] "RemoveContainer" containerID="60f858d2d85b01e9da2a6a95ed2be831935e234ad3033222a4291f2052f6ce52"
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.056885 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.061894 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.071936 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-confd\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.072036 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-server-conf\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.072079 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-plugins-conf\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.072150 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-plugins\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.072194 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfqtp\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-kube-api-access-jfqtp\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.072220 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-tls\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.072277 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7d035962-836c-48cf-8ea4-a3e5a23f58f9-pod-info\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.072319 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.072376 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.072431 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-erlang-cookie\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.072520 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7d035962-836c-48cf-8ea4-a3e5a23f58f9-erlang-cookie-secret\") pod \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\" (UID: \"7d035962-836c-48cf-8ea4-a3e5a23f58f9\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.082644 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.084208 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.084979 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-kube-api-access-jfqtp" (OuterVolumeSpecName: "kube-api-access-jfqtp") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "kube-api-access-jfqtp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.086485 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.088565 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.089609 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.095289 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/7d035962-836c-48cf-8ea4-a3e5a23f58f9-pod-info" (OuterVolumeSpecName: "pod-info") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.095311 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d035962-836c-48cf-8ea4-a3e5a23f58f9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.104418 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data" (OuterVolumeSpecName: "config-data") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.109356 4765 scope.go:117] "RemoveContainer" containerID="fc75f40345fbe86be4d1614b512064bb219a33382f202793ae741275bbb66a36"
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.174219 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfqtp\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-kube-api-access-jfqtp\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.174265 4765 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.174277 4765 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7d035962-836c-48cf-8ea4-a3e5a23f58f9-pod-info\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.174287 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.174325 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.174337 4765 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.174349 4765 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7d035962-836c-48cf-8ea4-a3e5a23f58f9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.174360 4765 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.174369 4765 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.191699 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc"
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.275730 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.279497 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-server-conf" (OuterVolumeSpecName: "server-conf") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.348112 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-66fcc55b75-8hcl6"
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.387592 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/865e0a69-0d85-4d93-9d38-f52449d09d87-logs\") pod \"865e0a69-0d85-4d93-9d38-f52449d09d87\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.387770 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data-custom\") pod \"865e0a69-0d85-4d93-9d38-f52449d09d87\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.387837 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data\") pod \"865e0a69-0d85-4d93-9d38-f52449d09d87\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.387885 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77nlk\" (UniqueName: \"kubernetes.io/projected/865e0a69-0d85-4d93-9d38-f52449d09d87-kube-api-access-77nlk\") pod \"865e0a69-0d85-4d93-9d38-f52449d09d87\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.387935 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-combined-ca-bundle\") pod \"865e0a69-0d85-4d93-9d38-f52449d09d87\" (UID: \"865e0a69-0d85-4d93-9d38-f52449d09d87\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.388191 4765 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7d035962-836c-48cf-8ea4-a3e5a23f58f9-server-conf\") on node \"crc\" DevicePath \"\""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.389465 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "7d035962-836c-48cf-8ea4-a3e5a23f58f9" (UID: "7d035962-836c-48cf-8ea4-a3e5a23f58f9"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.391827 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/865e0a69-0d85-4d93-9d38-f52449d09d87-logs" (OuterVolumeSpecName: "logs") pod "865e0a69-0d85-4d93-9d38-f52449d09d87" (UID: "865e0a69-0d85-4d93-9d38-f52449d09d87"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.393849 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "865e0a69-0d85-4d93-9d38-f52449d09d87" (UID: "865e0a69-0d85-4d93-9d38-f52449d09d87"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.395382 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/865e0a69-0d85-4d93-9d38-f52449d09d87-kube-api-access-77nlk" (OuterVolumeSpecName: "kube-api-access-77nlk") pod "865e0a69-0d85-4d93-9d38-f52449d09d87" (UID: "865e0a69-0d85-4d93-9d38-f52449d09d87"). InnerVolumeSpecName "kube-api-access-77nlk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.420313 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "865e0a69-0d85-4d93-9d38-f52449d09d87" (UID: "865e0a69-0d85-4d93-9d38-f52449d09d87"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.441742 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.452981 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data" (OuterVolumeSpecName: "config-data") pod "865e0a69-0d85-4d93-9d38-f52449d09d87" (UID: "865e0a69-0d85-4d93-9d38-f52449d09d87"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.459952 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.489078 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-combined-ca-bundle\") pod \"03838926-8208-43dc-9bfd-6af312a938a4\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.489169 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-config-data\") pod \"7694f523-adf7-4964-b475-6cd94cac7d75\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.489226 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nths2\" (UniqueName: \"kubernetes.io/projected/03838926-8208-43dc-9bfd-6af312a938a4-kube-api-access-nths2\") pod \"03838926-8208-43dc-9bfd-6af312a938a4\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.489299 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-combined-ca-bundle\") pod \"7694f523-adf7-4964-b475-6cd94cac7d75\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") "
Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.489765 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-config-data\") pod \"03838926-8208-43dc-9bfd-6af312a938a4\" (UID: \"03838926-8208-43dc-9bfd-6af312a938a4\") "
Dec 10 07:12:54 crc
kubenswrapper[4765]: I1210 07:12:54.489840 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2c26\" (UniqueName: \"kubernetes.io/projected/7694f523-adf7-4964-b475-6cd94cac7d75-kube-api-access-c2c26\") pod \"7694f523-adf7-4964-b475-6cd94cac7d75\" (UID: \"7694f523-adf7-4964-b475-6cd94cac7d75\") " Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.490141 4765 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7d035962-836c-48cf-8ea4-a3e5a23f58f9-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.490181 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.490195 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.490206 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77nlk\" (UniqueName: \"kubernetes.io/projected/865e0a69-0d85-4d93-9d38-f52449d09d87-kube-api-access-77nlk\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.490219 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/865e0a69-0d85-4d93-9d38-f52449d09d87-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.490228 4765 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/865e0a69-0d85-4d93-9d38-f52449d09d87-logs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.492497 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7694f523-adf7-4964-b475-6cd94cac7d75-kube-api-access-c2c26" (OuterVolumeSpecName: "kube-api-access-c2c26") pod "7694f523-adf7-4964-b475-6cd94cac7d75" (UID: "7694f523-adf7-4964-b475-6cd94cac7d75"). InnerVolumeSpecName "kube-api-access-c2c26". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.493760 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03838926-8208-43dc-9bfd-6af312a938a4-kube-api-access-nths2" (OuterVolumeSpecName: "kube-api-access-nths2") pod "03838926-8208-43dc-9bfd-6af312a938a4" (UID: "03838926-8208-43dc-9bfd-6af312a938a4"). InnerVolumeSpecName "kube-api-access-nths2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.525432 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-config-data" (OuterVolumeSpecName: "config-data") pod "03838926-8208-43dc-9bfd-6af312a938a4" (UID: "03838926-8208-43dc-9bfd-6af312a938a4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.526308 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03838926-8208-43dc-9bfd-6af312a938a4" (UID: "03838926-8208-43dc-9bfd-6af312a938a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.529451 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7694f523-adf7-4964-b475-6cd94cac7d75" (UID: "7694f523-adf7-4964-b475-6cd94cac7d75"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.537383 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-config-data" (OuterVolumeSpecName: "config-data") pod "7694f523-adf7-4964-b475-6cd94cac7d75" (UID: "7694f523-adf7-4964-b475-6cd94cac7d75"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.579922 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.591752 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4406d4e6-b2a9-4e81-9672-b54775fad3bb-etc-machine-id\") pod \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.591831 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data-custom\") pod \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.591878 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-combined-ca-bundle\") pod \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.591917 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-scripts\") pod \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.591968 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gjwm\" (UniqueName: \"kubernetes.io/projected/4406d4e6-b2a9-4e81-9672-b54775fad3bb-kube-api-access-6gjwm\") pod \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.592000 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data\") pod \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\" (UID: \"4406d4e6-b2a9-4e81-9672-b54775fad3bb\") " Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.592637 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.592661 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.592676 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2c26\" (UniqueName: \"kubernetes.io/projected/7694f523-adf7-4964-b475-6cd94cac7d75-kube-api-access-c2c26\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.592691 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03838926-8208-43dc-9bfd-6af312a938a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.592703 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7694f523-adf7-4964-b475-6cd94cac7d75-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.592714 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nths2\" (UniqueName: \"kubernetes.io/projected/03838926-8208-43dc-9bfd-6af312a938a4-kube-api-access-nths2\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.595152 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4406d4e6-b2a9-4e81-9672-b54775fad3bb-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4406d4e6-b2a9-4e81-9672-b54775fad3bb" (UID: "4406d4e6-b2a9-4e81-9672-b54775fad3bb"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.598547 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-scripts" (OuterVolumeSpecName: "scripts") pod "4406d4e6-b2a9-4e81-9672-b54775fad3bb" (UID: "4406d4e6-b2a9-4e81-9672-b54775fad3bb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.602861 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4406d4e6-b2a9-4e81-9672-b54775fad3bb" (UID: "4406d4e6-b2a9-4e81-9672-b54775fad3bb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.605341 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4406d4e6-b2a9-4e81-9672-b54775fad3bb-kube-api-access-6gjwm" (OuterVolumeSpecName: "kube-api-access-6gjwm") pod "4406d4e6-b2a9-4e81-9672-b54775fad3bb" (UID: "4406d4e6-b2a9-4e81-9672-b54775fad3bb"). InnerVolumeSpecName "kube-api-access-6gjwm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.605709 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13147cfa-7543-4a9b-a97b-8a83abf32e0b" path="/var/lib/kubelet/pods/13147cfa-7543-4a9b-a97b-8a83abf32e0b/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.606541 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bbedff0-5b89-4bbb-b308-6ccb13c8216c" path="/var/lib/kubelet/pods/1bbedff0-5b89-4bbb-b308-6ccb13c8216c/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.607131 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fec9b72-da40-4d40-9f56-5eca02badaba" path="/var/lib/kubelet/pods/1fec9b72-da40-4d40-9f56-5eca02badaba/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.608581 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="367a49cf-488a-4852-8728-78dacbfbd500" path="/var/lib/kubelet/pods/367a49cf-488a-4852-8728-78dacbfbd500/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.609285 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b8d221c-31b8-4dd5-b89c-2545b88461d5" path="/var/lib/kubelet/pods/3b8d221c-31b8-4dd5-b89c-2545b88461d5/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.609938 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b62b966-7b0a-4099-977c-44682f703187" path="/var/lib/kubelet/pods/4b62b966-7b0a-4099-977c-44682f703187/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.611353 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61dce5ef-c4c0-4880-ab5c-87391d092897" path="/var/lib/kubelet/pods/61dce5ef-c4c0-4880-ab5c-87391d092897/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.611963 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="655c5f62-880e-4e05-9db8-da19844facdf" path="/var/lib/kubelet/pods/655c5f62-880e-4e05-9db8-da19844facdf/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.618688 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" path="/var/lib/kubelet/pods/78b416b3-3796-4fa3-8a4f-7fa6107d98a1/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.623585 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="857ecb8b-72dc-40a7-a407-d85c40c40bcf" path="/var/lib/kubelet/pods/857ecb8b-72dc-40a7-a407-d85c40c40bcf/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.625447 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="949ef1d3-9f74-4052-a482-9fea4e48d374" path="/var/lib/kubelet/pods/949ef1d3-9f74-4052-a482-9fea4e48d374/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.627880 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5a49f7e-a63c-4d23-ad8d-bc876ddf5786" path="/var/lib/kubelet/pods/a5a49f7e-a63c-4d23-ad8d-bc876ddf5786/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.629978 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af358477-7483-4e80-b209-e7991328cbb1" path="/var/lib/kubelet/pods/af358477-7483-4e80-b209-e7991328cbb1/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.632483 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1099ee9-e4d7-496f-b35e-7617ee456898" 
path="/var/lib/kubelet/pods/b1099ee9-e4d7-496f-b35e-7617ee456898/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.633758 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5b765ff-9a5a-48b8-80c7-e8bd56613fcc" path="/var/lib/kubelet/pods/b5b765ff-9a5a-48b8-80c7-e8bd56613fcc/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.634834 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4" path="/var/lib/kubelet/pods/ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.636571 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c417c995-d247-48fe-afea-472a698e27f4" path="/var/lib/kubelet/pods/c417c995-d247-48fe-afea-472a698e27f4/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.638172 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7783f54-a81e-4b16-b37e-fa6d74c0d6f9" path="/var/lib/kubelet/pods/c7783f54-a81e-4b16-b37e-fa6d74c0d6f9/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.638964 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d11b24c2-0ac0-4f23-a575-d1b80db4ba11" path="/var/lib/kubelet/pods/d11b24c2-0ac0-4f23-a575-d1b80db4ba11/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.640940 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5a7a5c7-95da-49fe-ae5f-f3423f347fed" path="/var/lib/kubelet/pods/d5a7a5c7-95da-49fe-ae5f-f3423f347fed/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.641813 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3eeab68-7df8-402a-a581-23af26d7be84" path="/var/lib/kubelet/pods/e3eeab68-7df8-402a-a581-23af26d7be84/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.642601 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" path="/var/lib/kubelet/pods/efa8eaec-19fb-43da-a1f3-557b0847e966/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.644179 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f429f6df-3510-4c6a-b9e8-062895218832" path="/var/lib/kubelet/pods/f429f6df-3510-4c6a-b9e8-062895218832/volumes" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.675238 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4406d4e6-b2a9-4e81-9672-b54775fad3bb" (UID: "4406d4e6-b2a9-4e81-9672-b54775fad3bb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.694239 4765 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.694297 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.694313 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.694324 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gjwm\" (UniqueName: \"kubernetes.io/projected/4406d4e6-b2a9-4e81-9672-b54775fad3bb-kube-api-access-6gjwm\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.694356 4765 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4406d4e6-b2a9-4e81-9672-b54775fad3bb-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.725885 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data" (OuterVolumeSpecName: "config-data") pod "4406d4e6-b2a9-4e81-9672-b54775fad3bb" (UID: "4406d4e6-b2a9-4e81-9672-b54775fad3bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.796312 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4406d4e6-b2a9-4e81-9672-b54775fad3bb-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.877761 4765 generic.go:334] "Generic (PLEG): container finished" podID="03838926-8208-43dc-9bfd-6af312a938a4" containerID="e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607" exitCode=0 Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.877827 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"03838926-8208-43dc-9bfd-6af312a938a4","Type":"ContainerDied","Data":"e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607"} Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.877862 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"03838926-8208-43dc-9bfd-6af312a938a4","Type":"ContainerDied","Data":"6059940bd5580565536056565d4b076a9a034400dafc036086624ad23a9614e6"} Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.877880 4765 scope.go:117] "RemoveContainer" containerID="e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.877999 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.884831 4765 generic.go:334] "Generic (PLEG): container finished" podID="7694f523-adf7-4964-b475-6cd94cac7d75" containerID="b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" exitCode=0 Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.884888 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.884929 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7694f523-adf7-4964-b475-6cd94cac7d75","Type":"ContainerDied","Data":"b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90"} Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.885048 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7694f523-adf7-4964-b475-6cd94cac7d75","Type":"ContainerDied","Data":"d13f473a0de67d93c2d59c4d3e99676f932522d547214a58d1d827210a66196b"} Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.893869 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7d035962-836c-48cf-8ea4-a3e5a23f58f9","Type":"ContainerDied","Data":"fa211ab29fdde41c1f086ce2d57513215a3c715d127716c2d3ad307419fdecce"} Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.894017 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.901192 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.904922 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-66fcc55b75-8hcl6" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.904937 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66fcc55b75-8hcl6" event={"ID":"865e0a69-0d85-4d93-9d38-f52449d09d87","Type":"ContainerDied","Data":"a2bf5e315028c62ae807f755c9bf7e1759223d795629b89a473469c587e7df12"} Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.910321 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.918220 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.919570 4765 scope.go:117] "RemoveContainer" containerID="e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.919746 4765 generic.go:334] "Generic (PLEG): container finished" podID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" containerID="cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8" exitCode=0 Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.919866 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 07:12:54 crc kubenswrapper[4765]: E1210 07:12:54.920636 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607\": container with ID starting with e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607 not found: ID does not exist" containerID="e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.920775 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607"} err="failed to get container status \"e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607\": rpc error: code = NotFound desc = could not find container \"e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607\": container with ID starting with e60c3598bd56315a17f528317b4f8384d07efe59462616bb913d1726a981a607 not found: ID does not exist" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.920800 4765 scope.go:117] "RemoveContainer" containerID="b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.922302 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4406d4e6-b2a9-4e81-9672-b54775fad3bb","Type":"ContainerDied","Data":"cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8"} Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.922451 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4406d4e6-b2a9-4e81-9672-b54775fad3bb","Type":"ContainerDied","Data":"3fddf3e012dcfcd3c7c9523db777474ace50b2f2a34da659dd237c24ded64b53"} Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.930056 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.949530 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.956188 4765 scope.go:117] "RemoveContainer" containerID="b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" Dec 10 07:12:54 crc kubenswrapper[4765]: E1210 07:12:54.958530 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90\": container with ID starting with b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90 not found: ID does not exist" containerID="b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.959046 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90"} err="failed to get container status \"b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90\": rpc error: code = NotFound desc = could not find container \"b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90\": container with ID starting with b1b57c27066a24c2bb2e53967e5d8166ed666503cc668d2df93d3c564ea2bf90 not found: ID does not exist" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.959097 4765 scope.go:117] "RemoveContainer" 
containerID="f54cf327f09ae7940909c6415da47f057333ee5a45036d48b02fd4c6fd91cb2d" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.963960 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.970151 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-66fcc55b75-8hcl6"] Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.975526 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-66fcc55b75-8hcl6"] Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.980804 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.981500 4765 scope.go:117] "RemoveContainer" containerID="a10778f7f86ddbb0712d58a66f9e2ebd6b349454a29483ae7d2e89e3bf524a02" Dec 10 07:12:54 crc kubenswrapper[4765]: I1210 07:12:54.986050 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 07:12:55 crc kubenswrapper[4765]: I1210 07:12:55.001418 4765 scope.go:117] "RemoveContainer" containerID="cb7c75803c1530a9151f1c8f58e20ea86e4933c86c600aeb7834d5e89e66efe6" Dec 10 07:12:55 crc kubenswrapper[4765]: I1210 07:12:55.028295 4765 scope.go:117] "RemoveContainer" containerID="2ea6901c79ede6a161d2e30da9d3b8efb9ba7a80dd2d68ffbc3c35ec54f42907" Dec 10 07:12:55 crc kubenswrapper[4765]: I1210 07:12:55.046646 4765 scope.go:117] "RemoveContainer" containerID="ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5" Dec 10 07:12:55 crc kubenswrapper[4765]: I1210 07:12:55.067613 4765 scope.go:117] "RemoveContainer" containerID="cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8" Dec 10 07:12:55 crc kubenswrapper[4765]: I1210 07:12:55.091730 4765 scope.go:117] "RemoveContainer" containerID="ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5" Dec 10 07:12:55 crc kubenswrapper[4765]: E1210 07:12:55.092816 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5\": container with ID starting with ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5 not found: ID does not exist" containerID="ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5" Dec 10 07:12:55 crc kubenswrapper[4765]: I1210 07:12:55.092873 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5"} err="failed to get container status \"ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5\": rpc error: code = NotFound desc = could not find container \"ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5\": container with ID starting with ef36e8ca9904a20d9e0bedad569a152dc56cb8af69b2e6ed8a76e1fc5c40bec5 not found: ID does not exist" Dec 10 07:12:55 crc kubenswrapper[4765]: I1210 07:12:55.092907 4765 scope.go:117] "RemoveContainer" containerID="cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8" Dec 10 07:12:55 crc kubenswrapper[4765]: E1210 07:12:55.093462 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8\": container with ID starting with cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8 not 
found: ID does not exist" containerID="cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8" Dec 10 07:12:55 crc kubenswrapper[4765]: I1210 07:12:55.093501 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8"} err="failed to get container status \"cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8\": rpc error: code = NotFound desc = could not find container \"cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8\": container with ID starting with cb79bead574a5e96dc6ceab7d7db16192f7a0ba03fb849fa04afc06f3a7a41d8 not found: ID does not exist" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.603422 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03838926-8208-43dc-9bfd-6af312a938a4" path="/var/lib/kubelet/pods/03838926-8208-43dc-9bfd-6af312a938a4/volumes" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.604665 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" path="/var/lib/kubelet/pods/4406d4e6-b2a9-4e81-9672-b54775fad3bb/volumes" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.605730 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7694f523-adf7-4964-b475-6cd94cac7d75" path="/var/lib/kubelet/pods/7694f523-adf7-4964-b475-6cd94cac7d75/volumes" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.607980 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" path="/var/lib/kubelet/pods/7d035962-836c-48cf-8ea4-a3e5a23f58f9/volumes" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.611635 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="865e0a69-0d85-4d93-9d38-f52449d09d87" path="/var/lib/kubelet/pods/865e0a69-0d85-4d93-9d38-f52449d09d87/volumes" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.762864 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.953693 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-internal-tls-certs\") pod \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.953860 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-httpd-config\") pod \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.953998 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-public-tls-certs\") pod \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.954055 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-config\") pod \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.954212 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-ovndb-tls-certs\") pod \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.954271 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5dzm\" (UniqueName: \"kubernetes.io/projected/4e9d4a75-10e4-46dd-9180-821c917a2b5e-kube-api-access-n5dzm\") pod \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.954374 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-combined-ca-bundle\") pod \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\" (UID: \"4e9d4a75-10e4-46dd-9180-821c917a2b5e\") " Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.963759 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e9d4a75-10e4-46dd-9180-821c917a2b5e-kube-api-access-n5dzm" (OuterVolumeSpecName: "kube-api-access-n5dzm") pod "4e9d4a75-10e4-46dd-9180-821c917a2b5e" (UID: "4e9d4a75-10e4-46dd-9180-821c917a2b5e"). InnerVolumeSpecName "kube-api-access-n5dzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.963859 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "4e9d4a75-10e4-46dd-9180-821c917a2b5e" (UID: "4e9d4a75-10e4-46dd-9180-821c917a2b5e"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.982687 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.986806 4765 generic.go:334] "Generic (PLEG): container finished" podID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerID="0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04" exitCode=0 Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.986895 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8847d4d-d633-4ca3-90fa-3384e525864d","Type":"ContainerDied","Data":"0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04"} Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.987268 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8847d4d-d633-4ca3-90fa-3384e525864d","Type":"ContainerDied","Data":"aa360b4d24812d38d9d2f1c956e002d5537c82bf7c445513720cbadc19e3682a"} Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.987347 4765 scope.go:117] "RemoveContainer" containerID="5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.990367 4765 generic.go:334] "Generic (PLEG): container finished" podID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerID="1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4" exitCode=0 Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.990535 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-654b8cdb7c-84l5p" Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.990539 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-654b8cdb7c-84l5p" event={"ID":"4e9d4a75-10e4-46dd-9180-821c917a2b5e","Type":"ContainerDied","Data":"1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4"} Dec 10 07:12:56 crc kubenswrapper[4765]: I1210 07:12:56.990700 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-654b8cdb7c-84l5p" event={"ID":"4e9d4a75-10e4-46dd-9180-821c917a2b5e","Type":"ContainerDied","Data":"e1a1b8adee1854c555d369972772b8dd5cfab049835af6ed630ec9a3f7573dd0"} Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.000973 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.001380 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.001839 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" 
cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.001890 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.009276 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.015902 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.016745 4765 scope.go:117] "RemoveContainer" containerID="eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e" Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.017788 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.017864 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovs-vswitchd" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.028689 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e9d4a75-10e4-46dd-9180-821c917a2b5e" (UID: "4e9d4a75-10e4-46dd-9180-821c917a2b5e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.029355 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-config" (OuterVolumeSpecName: "config") pod "4e9d4a75-10e4-46dd-9180-821c917a2b5e" (UID: "4e9d4a75-10e4-46dd-9180-821c917a2b5e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.030870 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4e9d4a75-10e4-46dd-9180-821c917a2b5e" (UID: "4e9d4a75-10e4-46dd-9180-821c917a2b5e"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.032054 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4e9d4a75-10e4-46dd-9180-821c917a2b5e" (UID: "4e9d4a75-10e4-46dd-9180-821c917a2b5e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.036818 4765 scope.go:117] "RemoveContainer" containerID="0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.049356 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "4e9d4a75-10e4-46dd-9180-821c917a2b5e" (UID: "4e9d4a75-10e4-46dd-9180-821c917a2b5e"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.055881 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tm64z\" (UniqueName: \"kubernetes.io/projected/c8847d4d-d633-4ca3-90fa-3384e525864d-kube-api-access-tm64z\") pod \"c8847d4d-d633-4ca3-90fa-3384e525864d\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.055947 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-config-data\") pod \"c8847d4d-d633-4ca3-90fa-3384e525864d\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056017 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-ceilometer-tls-certs\") pod \"c8847d4d-d633-4ca3-90fa-3384e525864d\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056078 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-combined-ca-bundle\") pod \"c8847d4d-d633-4ca3-90fa-3384e525864d\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056171 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-scripts\") pod \"c8847d4d-d633-4ca3-90fa-3384e525864d\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056219 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-run-httpd\") pod \"c8847d4d-d633-4ca3-90fa-3384e525864d\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056244 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-sg-core-conf-yaml\") pod \"c8847d4d-d633-4ca3-90fa-3384e525864d\" (UID: 
\"c8847d4d-d633-4ca3-90fa-3384e525864d\") " Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056304 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-log-httpd\") pod \"c8847d4d-d633-4ca3-90fa-3384e525864d\" (UID: \"c8847d4d-d633-4ca3-90fa-3384e525864d\") " Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056655 4765 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056679 4765 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056695 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-config\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056707 4765 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056719 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5dzm\" (UniqueName: \"kubernetes.io/projected/4e9d4a75-10e4-46dd-9180-821c917a2b5e-kube-api-access-n5dzm\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056732 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.056743 4765 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e9d4a75-10e4-46dd-9180-821c917a2b5e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.057339 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c8847d4d-d633-4ca3-90fa-3384e525864d" (UID: "c8847d4d-d633-4ca3-90fa-3384e525864d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.057357 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c8847d4d-d633-4ca3-90fa-3384e525864d" (UID: "c8847d4d-d633-4ca3-90fa-3384e525864d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.058865 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8847d4d-d633-4ca3-90fa-3384e525864d-kube-api-access-tm64z" (OuterVolumeSpecName: "kube-api-access-tm64z") pod "c8847d4d-d633-4ca3-90fa-3384e525864d" (UID: "c8847d4d-d633-4ca3-90fa-3384e525864d"). InnerVolumeSpecName "kube-api-access-tm64z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.059802 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-scripts" (OuterVolumeSpecName: "scripts") pod "c8847d4d-d633-4ca3-90fa-3384e525864d" (UID: "c8847d4d-d633-4ca3-90fa-3384e525864d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.065051 4765 scope.go:117] "RemoveContainer" containerID="38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.077783 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c8847d4d-d633-4ca3-90fa-3384e525864d" (UID: "c8847d4d-d633-4ca3-90fa-3384e525864d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.087553 4765 scope.go:117] "RemoveContainer" containerID="5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d" Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.088754 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d\": container with ID starting with 5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d not found: ID does not exist" containerID="5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.088789 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d"} err="failed to get container status \"5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d\": rpc error: code = NotFound desc = could not find container \"5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d\": container with ID starting with 5c25e5022d3f67b2367a689a9cf70865a17087f11b9b9853173e30c48e270b2d not found: ID does not exist" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.088821 4765 scope.go:117] "RemoveContainer" containerID="eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e" Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.089503 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e\": container with ID starting with eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e not found: ID does not exist" containerID="eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.089565 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e"} err="failed to get container status \"eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e\": rpc error: code = NotFound desc = could not find container \"eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e\": container with ID starting with eff0f8e0952a591ccab8adaf9372a01139e40a5b801b999a936af80ed6f7fd8e not found: ID 
does not exist" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.089613 4765 scope.go:117] "RemoveContainer" containerID="0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04" Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.090071 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04\": container with ID starting with 0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04 not found: ID does not exist" containerID="0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.090138 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04"} err="failed to get container status \"0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04\": rpc error: code = NotFound desc = could not find container \"0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04\": container with ID starting with 0f83ede292c143b8abe040b9c236b324a83ebd2fc80b7a5c8ba399301b7c6a04 not found: ID does not exist" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.090171 4765 scope.go:117] "RemoveContainer" containerID="38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c" Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.090590 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c\": container with ID starting with 38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c not found: ID does not exist" containerID="38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.090626 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c"} err="failed to get container status \"38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c\": rpc error: code = NotFound desc = could not find container \"38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c\": container with ID starting with 38e7778e406b7c1c45d7bce71effadc0372bfa9af421a74aaf2eef133262aa8c not found: ID does not exist" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.090646 4765 scope.go:117] "RemoveContainer" containerID="2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.095217 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "c8847d4d-d633-4ca3-90fa-3384e525864d" (UID: "c8847d4d-d633-4ca3-90fa-3384e525864d"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.114924 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8847d4d-d633-4ca3-90fa-3384e525864d" (UID: "c8847d4d-d633-4ca3-90fa-3384e525864d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.114964 4765 scope.go:117] "RemoveContainer" containerID="1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.127507 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-config-data" (OuterVolumeSpecName: "config-data") pod "c8847d4d-d633-4ca3-90fa-3384e525864d" (UID: "c8847d4d-d633-4ca3-90fa-3384e525864d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.134824 4765 scope.go:117] "RemoveContainer" containerID="2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24" Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.135361 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24\": container with ID starting with 2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24 not found: ID does not exist" containerID="2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.135421 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24"} err="failed to get container status \"2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24\": rpc error: code = NotFound desc = could not find container \"2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24\": container with ID starting with 2b76a61363bc9d6919fda1604c79fc3c7f411448fef6487068bdd864dabfac24 not found: ID does not exist" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.135446 4765 scope.go:117] "RemoveContainer" containerID="1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4" Dec 10 07:12:57 crc kubenswrapper[4765]: E1210 07:12:57.135759 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4\": container with ID starting with 1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4 not found: ID does not exist" containerID="1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.135802 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4"} err="failed to get container status \"1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4\": rpc error: code = NotFound desc = could not find container \"1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4\": container with ID starting with 1e66dbb7593398b7b65f4ca08f3dd8507c55a48af39dcf35231d9d262161ded4 not found: ID does not exist" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.158278 4765 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.158329 4765 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.158341 4765 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8847d4d-d633-4ca3-90fa-3384e525864d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.158351 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tm64z\" (UniqueName: \"kubernetes.io/projected/c8847d4d-d633-4ca3-90fa-3384e525864d-kube-api-access-tm64z\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.158362 4765 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.158370 4765 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.158379 4765 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.158387 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8847d4d-d633-4ca3-90fa-3384e525864d-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.325174 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-654b8cdb7c-84l5p"] Dec 10 07:12:57 crc kubenswrapper[4765]: I1210 07:12:57.330585 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-654b8cdb7c-84l5p"] Dec 10 07:12:58 crc kubenswrapper[4765]: I1210 07:12:58.003465 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 07:12:58 crc kubenswrapper[4765]: I1210 07:12:58.044312 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:12:58 crc kubenswrapper[4765]: I1210 07:12:58.050931 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 07:12:58 crc kubenswrapper[4765]: I1210 07:12:58.622371 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" path="/var/lib/kubelet/pods/4e9d4a75-10e4-46dd-9180-821c917a2b5e/volumes" Dec 10 07:12:58 crc kubenswrapper[4765]: I1210 07:12:58.623418 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" path="/var/lib/kubelet/pods/c8847d4d-d633-4ca3-90fa-3384e525864d/volumes" Dec 10 07:13:02 crc kubenswrapper[4765]: E1210 07:13:02.001586 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:13:02 crc kubenswrapper[4765]: E1210 07:13:02.001971 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:13:02 crc kubenswrapper[4765]: E1210 07:13:02.002264 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:13:02 crc kubenswrapper[4765]: E1210 07:13:02.002302 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" Dec 10 07:13:02 crc kubenswrapper[4765]: E1210 07:13:02.004769 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:13:02 crc kubenswrapper[4765]: E1210 07:13:02.006170 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:13:02 crc 
kubenswrapper[4765]: E1210 07:13:02.010424 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:13:02 crc kubenswrapper[4765]: E1210 07:13:02.010504 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovs-vswitchd" Dec 10 07:13:04 crc kubenswrapper[4765]: I1210 07:13:04.049688 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:13:04 crc kubenswrapper[4765]: I1210 07:13:04.050112 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:13:07 crc kubenswrapper[4765]: E1210 07:13:07.001367 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:13:07 crc kubenswrapper[4765]: E1210 07:13:07.002385 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:13:07 crc kubenswrapper[4765]: E1210 07:13:07.002806 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:13:07 crc kubenswrapper[4765]: E1210 07:13:07.002844 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" Dec 10 07:13:07 crc kubenswrapper[4765]: E1210 07:13:07.003404 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: 
cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:13:07 crc kubenswrapper[4765]: E1210 07:13:07.005322 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:13:07 crc kubenswrapper[4765]: E1210 07:13:07.006658 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:13:07 crc kubenswrapper[4765]: E1210 07:13:07.006720 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovs-vswitchd" Dec 10 07:13:12 crc kubenswrapper[4765]: E1210 07:13:12.001317 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:13:12 crc kubenswrapper[4765]: E1210 07:13:12.002699 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:13:12 crc kubenswrapper[4765]: E1210 07:13:12.002936 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:13:12 crc kubenswrapper[4765]: E1210 07:13:12.003318 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 10 07:13:12 crc kubenswrapper[4765]: E1210 07:13:12.003435 4765 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab is running failed: container process not found" 
probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" Dec 10 07:13:12 crc kubenswrapper[4765]: E1210 07:13:12.004376 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:13:12 crc kubenswrapper[4765]: E1210 07:13:12.005868 4765 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 10 07:13:12 crc kubenswrapper[4765]: E1210 07:13:12.006017 4765 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-v6h5d" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovs-vswitchd" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.105619 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.165030 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-v6h5d_209844a2-e0ac-447f-99f6-28cd864ca648/ovs-vswitchd/0.log" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.166016 4765 generic.go:334] "Generic (PLEG): container finished" podID="209844a2-e0ac-447f-99f6-28cd864ca648" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" exitCode=137 Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.166137 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-v6h5d" event={"ID":"209844a2-e0ac-447f-99f6-28cd864ca648","Type":"ContainerDied","Data":"d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079"} Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.173580 4765 generic.go:334] "Generic (PLEG): container finished" podID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerID="39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2" exitCode=137 Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.173637 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2"} Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.173700 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86","Type":"ContainerDied","Data":"47a6992e3f0b8ebb8c2ac3bece8d28b964b32584f5893c5295ffba6b0965a1f0"} Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.173722 4765 scope.go:117] "RemoveContainer" containerID="39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.173734 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.195081 4765 scope.go:117] "RemoveContainer" containerID="b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.212537 4765 scope.go:117] "RemoveContainer" containerID="349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.231263 4765 scope.go:117] "RemoveContainer" containerID="47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.250176 4765 scope.go:117] "RemoveContainer" containerID="cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.268072 4765 scope.go:117] "RemoveContainer" containerID="f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.286587 4765 scope.go:117] "RemoveContainer" containerID="d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.296959 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-lock\") pod \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.297070 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.297114 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") pod \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.297176 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-298hl\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-kube-api-access-298hl\") pod \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.297220 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-cache\") pod \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\" (UID: \"d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86\") " Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.298012 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-cache" (OuterVolumeSpecName: "cache") pod "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86"). InnerVolumeSpecName "cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.298032 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-lock" (OuterVolumeSpecName: "lock") pod "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.302671 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "swift") pod "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.302833 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.303909 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-kube-api-access-298hl" (OuterVolumeSpecName: "kube-api-access-298hl") pod "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" (UID: "d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86"). InnerVolumeSpecName "kube-api-access-298hl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.312248 4765 scope.go:117] "RemoveContainer" containerID="8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.385124 4765 scope.go:117] "RemoveContainer" containerID="9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.398960 4765 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-lock\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.399014 4765 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.399026 4765 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.399036 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-298hl\" (UniqueName: \"kubernetes.io/projected/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-kube-api-access-298hl\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.399045 4765 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86-cache\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.403653 4765 scope.go:117] "RemoveContainer" 
containerID="4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.412980 4765 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.421199 4765 scope.go:117] "RemoveContainer" containerID="7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.439835 4765 scope.go:117] "RemoveContainer" containerID="d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.458608 4765 scope.go:117] "RemoveContainer" containerID="b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.477816 4765 scope.go:117] "RemoveContainer" containerID="ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.508556 4765 scope.go:117] "RemoveContainer" containerID="4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.510034 4765 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.511904 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.518539 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.527531 4765 scope.go:117] "RemoveContainer" containerID="39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.528036 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2\": container with ID starting with 39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2 not found: ID does not exist" containerID="39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.528070 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2"} err="failed to get container status \"39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2\": rpc error: code = NotFound desc = could not find container \"39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2\": container with ID starting with 39226f8cc8a03930bd902803a8b4288d6032977d3c35db4259b0445250e1e7b2 not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.528126 4765 scope.go:117] "RemoveContainer" containerID="b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.528649 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049\": container with ID starting with b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049 not found: ID does not exist" 
containerID="b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.528699 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049"} err="failed to get container status \"b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049\": rpc error: code = NotFound desc = could not find container \"b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049\": container with ID starting with b867a8d1fd276cf779b269125911d71fbdc4fda5701cf8688d4e1b310044e049 not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.528735 4765 scope.go:117] "RemoveContainer" containerID="349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.529015 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101\": container with ID starting with 349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101 not found: ID does not exist" containerID="349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.529044 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101"} err="failed to get container status \"349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101\": rpc error: code = NotFound desc = could not find container \"349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101\": container with ID starting with 349bdf95834cb9f842e6b4c1d3638b41041514194cbcb5b970b43c4f5905a101 not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.529060 4765 scope.go:117] "RemoveContainer" containerID="47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.529405 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f\": container with ID starting with 47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f not found: ID does not exist" containerID="47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.529427 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f"} err="failed to get container status \"47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f\": rpc error: code = NotFound desc = could not find container \"47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f\": container with ID starting with 47cf7a985566866c36c60822d4497071985ac369da2fd01616c41dc5a2b1e19f not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.529441 4765 scope.go:117] "RemoveContainer" containerID="cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.529688 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a\": container with ID starting with cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a not found: ID does not exist" containerID="cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.529705 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a"} err="failed to get container status \"cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a\": rpc error: code = NotFound desc = could not find container \"cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a\": container with ID starting with cc486fba5de7c78709592945f772f250acef64e3e50fa5a5b6a3fdbafcbf607a not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.529720 4765 scope.go:117] "RemoveContainer" containerID="f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.529907 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63\": container with ID starting with f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63 not found: ID does not exist" containerID="f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.529928 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63"} err="failed to get container status \"f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63\": rpc error: code = NotFound desc = could not find container \"f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63\": container with ID starting with f35d3757711e456d6892f446dd4774ffaa8413dd814648075fe8bf69e3089c63 not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.529940 4765 scope.go:117] "RemoveContainer" containerID="d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.530202 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae\": container with ID starting with d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae not found: ID does not exist" containerID="d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.530221 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae"} err="failed to get container status \"d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae\": rpc error: code = NotFound desc = could not find container \"d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae\": container with ID starting with d17735ab09308dc802722eda2a476a123f866d58e68d758cd7b67b2673a16eae not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.530233 4765 scope.go:117] "RemoveContainer" containerID="8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73" Dec 10 07:13:15 crc 
kubenswrapper[4765]: E1210 07:13:15.530410 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73\": container with ID starting with 8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73 not found: ID does not exist" containerID="8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.530428 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73"} err="failed to get container status \"8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73\": rpc error: code = NotFound desc = could not find container \"8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73\": container with ID starting with 8a125681c2edc8966797c35fe861ce37d733c183b5ba7c09434cf876cdca5f73 not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.530439 4765 scope.go:117] "RemoveContainer" containerID="9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.530619 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9\": container with ID starting with 9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9 not found: ID does not exist" containerID="9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.530636 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9"} err="failed to get container status \"9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9\": rpc error: code = NotFound desc = could not find container \"9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9\": container with ID starting with 9cd9b93d1dbfce955b210d123285bdd2fbb9f7b231672a4aad54ede9927aaae9 not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.530648 4765 scope.go:117] "RemoveContainer" containerID="4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.530834 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e\": container with ID starting with 4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e not found: ID does not exist" containerID="4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.530851 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e"} err="failed to get container status \"4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e\": rpc error: code = NotFound desc = could not find container \"4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e\": container with ID starting with 4594560a05f9217a4d5f8e88170509508194fe66c2914b7f958548a105104d7e not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: 
I1210 07:13:15.530864 4765 scope.go:117] "RemoveContainer" containerID="7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.531038 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f\": container with ID starting with 7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f not found: ID does not exist" containerID="7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.531057 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f"} err="failed to get container status \"7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f\": rpc error: code = NotFound desc = could not find container \"7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f\": container with ID starting with 7135e81e69285b42d0ac4c07ee634eaa2b3499547717a42a6aa67314ff49099f not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.531068 4765 scope.go:117] "RemoveContainer" containerID="d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.531243 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b\": container with ID starting with d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b not found: ID does not exist" containerID="d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.531260 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b"} err="failed to get container status \"d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b\": rpc error: code = NotFound desc = could not find container \"d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b\": container with ID starting with d3c7fe75c0f925fb125a734c029f2059bdb6fd265030817676b5b3b8a052fd9b not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.531276 4765 scope.go:117] "RemoveContainer" containerID="b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.531457 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d\": container with ID starting with b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d not found: ID does not exist" containerID="b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.531474 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d"} err="failed to get container status \"b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d\": rpc error: code = NotFound desc = could not find container \"b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d\": container 
with ID starting with b544a922da14385b7b5a0a7564e80057aa0adf7c8d4fad846360fa1ca4a7f25d not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.531485 4765 scope.go:117] "RemoveContainer" containerID="ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.531662 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52\": container with ID starting with ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52 not found: ID does not exist" containerID="ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.531684 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52"} err="failed to get container status \"ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52\": rpc error: code = NotFound desc = could not find container \"ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52\": container with ID starting with ffc515962d5270d5f14f12935475f1e6e227235b5b9a72a37f6927cde51add52 not found: ID does not exist" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.531700 4765 scope.go:117] "RemoveContainer" containerID="4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0" Dec 10 07:13:15 crc kubenswrapper[4765]: E1210 07:13:15.531915 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0\": container with ID starting with 4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0 not found: ID does not exist" containerID="4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0" Dec 10 07:13:15 crc kubenswrapper[4765]: I1210 07:13:15.531933 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0"} err="failed to get container status \"4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0\": rpc error: code = NotFound desc = could not find container \"4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0\": container with ID starting with 4a3613cd84d23f05f8318366d8316a06d3e5f1244631e3c33dbba7434ad440a0 not found: ID does not exist" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.284626 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-v6h5d_209844a2-e0ac-447f-99f6-28cd864ca648/ovs-vswitchd/0.log" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.285413 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.322691 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-log\") pod \"209844a2-e0ac-447f-99f6-28cd864ca648\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.322774 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-etc-ovs\") pod \"209844a2-e0ac-447f-99f6-28cd864ca648\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.322839 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqlnr\" (UniqueName: \"kubernetes.io/projected/209844a2-e0ac-447f-99f6-28cd864ca648-kube-api-access-jqlnr\") pod \"209844a2-e0ac-447f-99f6-28cd864ca648\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.322872 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-lib\") pod \"209844a2-e0ac-447f-99f6-28cd864ca648\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.322936 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/209844a2-e0ac-447f-99f6-28cd864ca648-scripts\") pod \"209844a2-e0ac-447f-99f6-28cd864ca648\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.322982 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-run\") pod \"209844a2-e0ac-447f-99f6-28cd864ca648\" (UID: \"209844a2-e0ac-447f-99f6-28cd864ca648\") " Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.323393 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-run" (OuterVolumeSpecName: "var-run") pod "209844a2-e0ac-447f-99f6-28cd864ca648" (UID: "209844a2-e0ac-447f-99f6-28cd864ca648"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.323435 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-log" (OuterVolumeSpecName: "var-log") pod "209844a2-e0ac-447f-99f6-28cd864ca648" (UID: "209844a2-e0ac-447f-99f6-28cd864ca648"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.323453 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "209844a2-e0ac-447f-99f6-28cd864ca648" (UID: "209844a2-e0ac-447f-99f6-28cd864ca648"). InnerVolumeSpecName "etc-ovs". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.324136 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-lib" (OuterVolumeSpecName: "var-lib") pod "209844a2-e0ac-447f-99f6-28cd864ca648" (UID: "209844a2-e0ac-447f-99f6-28cd864ca648"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.325130 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/209844a2-e0ac-447f-99f6-28cd864ca648-scripts" (OuterVolumeSpecName: "scripts") pod "209844a2-e0ac-447f-99f6-28cd864ca648" (UID: "209844a2-e0ac-447f-99f6-28cd864ca648"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.329441 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/209844a2-e0ac-447f-99f6-28cd864ca648-kube-api-access-jqlnr" (OuterVolumeSpecName: "kube-api-access-jqlnr") pod "209844a2-e0ac-447f-99f6-28cd864ca648" (UID: "209844a2-e0ac-447f-99f6-28cd864ca648"). InnerVolumeSpecName "kube-api-access-jqlnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.424839 4765 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-log\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.424889 4765 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-etc-ovs\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.424900 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqlnr\" (UniqueName: \"kubernetes.io/projected/209844a2-e0ac-447f-99f6-28cd864ca648-kube-api-access-jqlnr\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.424910 4765 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-lib\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.424918 4765 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/209844a2-e0ac-447f-99f6-28cd864ca648-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.424925 4765 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/209844a2-e0ac-447f-99f6-28cd864ca648-var-run\") on node \"crc\" DevicePath \"\"" Dec 10 07:13:16 crc kubenswrapper[4765]: I1210 07:13:16.597798 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" path="/var/lib/kubelet/pods/d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86/volumes" Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.193034 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-v6h5d_209844a2-e0ac-447f-99f6-28cd864ca648/ovs-vswitchd/0.log" Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.193933 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-v6h5d" 
event={"ID":"209844a2-e0ac-447f-99f6-28cd864ca648","Type":"ContainerDied","Data":"8c9e44c5075dd308c10c91908b1b3130726915aed6ef884b3965f357982cf285"} Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.193989 4765 scope.go:117] "RemoveContainer" containerID="d78be4bde0ea0c28c26cb5748e1e077c78d8bdf44101348ba52ae79f9f5b0079" Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.194247 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-v6h5d" Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.220741 4765 scope.go:117] "RemoveContainer" containerID="1c982b32b25bec84d5131cc19ad8748f2b57c08b8ab7de7b1c3d3e741e216dab" Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.222729 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-v6h5d"] Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.230384 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-v6h5d"] Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.242738 4765 scope.go:117] "RemoveContainer" containerID="f133729f179c48ec9b76c236a6a3f8aeeacde532a47048ee4773738cc978f7d1" Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.292059 4765 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod416ec9f7-82f7-4eb1-a936-51038c6da878"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod416ec9f7-82f7-4eb1-a936-51038c6da878] : Timed out while waiting for systemd to remove kubepods-besteffort-pod416ec9f7_82f7_4eb1_a936_51038c6da878.slice" Dec 10 07:13:17 crc kubenswrapper[4765]: E1210 07:13:17.292132 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod416ec9f7-82f7-4eb1-a936-51038c6da878] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod416ec9f7-82f7-4eb1-a936-51038c6da878] : Timed out while waiting for systemd to remove kubepods-besteffort-pod416ec9f7_82f7_4eb1_a936_51038c6da878.slice" pod="openstack/ovn-controller-metrics-hhk4h" podUID="416ec9f7-82f7-4eb1-a936-51038c6da878" Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.301351 4765 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podea4a2d0b-62e8-4527-948f-9f9c76070af1"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podea4a2d0b-62e8-4527-948f-9f9c76070af1] : Timed out while waiting for systemd to remove kubepods-besteffort-podea4a2d0b_62e8_4527_948f_9f9c76070af1.slice" Dec 10 07:13:17 crc kubenswrapper[4765]: E1210 07:13:17.301411 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort podea4a2d0b-62e8-4527-948f-9f9c76070af1] : unable to destroy cgroup paths for cgroup [kubepods besteffort podea4a2d0b-62e8-4527-948f-9f9c76070af1] : Timed out while waiting for systemd to remove kubepods-besteffort-podea4a2d0b_62e8_4527_948f_9f9c76070af1.slice" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v" podUID="ea4a2d0b-62e8-4527-948f-9f9c76070af1" Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.855259 4765 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod3d46af4c-da3b-47cb-a069-cb978f0df610"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod3d46af4c-da3b-47cb-a069-cb978f0df610] : Timed out while waiting for systemd to remove 
Dec 10 07:13:17 crc kubenswrapper[4765]: I1210 07:13:17.855259 4765 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod3d46af4c-da3b-47cb-a069-cb978f0df610"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod3d46af4c-da3b-47cb-a069-cb978f0df610] : Timed out while waiting for systemd to remove kubepods-besteffort-pod3d46af4c_da3b_47cb_a069_cb978f0df610.slice"
Dec 10 07:13:18 crc kubenswrapper[4765]: I1210 07:13:18.203022 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-hhk4h"
Dec 10 07:13:18 crc kubenswrapper[4765]: I1210 07:13:18.203775 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56c6c8bc97-9hn2v"
Dec 10 07:13:18 crc kubenswrapper[4765]: I1210 07:13:18.225259 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56c6c8bc97-9hn2v"]
Dec 10 07:13:18 crc kubenswrapper[4765]: I1210 07:13:18.230004 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56c6c8bc97-9hn2v"]
Dec 10 07:13:18 crc kubenswrapper[4765]: I1210 07:13:18.244258 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-hhk4h"]
Dec 10 07:13:18 crc kubenswrapper[4765]: I1210 07:13:18.265061 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-hhk4h"]
Dec 10 07:13:18 crc kubenswrapper[4765]: I1210 07:13:18.602659 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" path="/var/lib/kubelet/pods/209844a2-e0ac-447f-99f6-28cd864ca648/volumes"
Dec 10 07:13:18 crc kubenswrapper[4765]: I1210 07:13:18.603469 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="416ec9f7-82f7-4eb1-a936-51038c6da878" path="/var/lib/kubelet/pods/416ec9f7-82f7-4eb1-a936-51038c6da878/volumes"
Dec 10 07:13:18 crc kubenswrapper[4765]: I1210 07:13:18.604210 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea4a2d0b-62e8-4527-948f-9f9c76070af1" path="/var/lib/kubelet/pods/ea4a2d0b-62e8-4527-948f-9f9c76070af1/volumes"
Dec 10 07:13:26 crc kubenswrapper[4765]: I1210 07:13:26.586684 4765 scope.go:117] "RemoveContainer" containerID="a2c8bf082e2ca39c600927ef90c7c46793163400415d1bf326aea293998c0b6d"
Dec 10 07:13:26 crc kubenswrapper[4765]: I1210 07:13:26.613816 4765 scope.go:117] "RemoveContainer" containerID="b5e74a15b203ef52f851095f1cc16190a0f4872664a016876134693584df8e67"
Dec 10 07:13:34 crc kubenswrapper[4765]: I1210 07:13:34.049689 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 07:13:34 crc kubenswrapper[4765]: I1210 07:13:34.050383 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
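"connect: connection refused" in the probe output above means nothing was listening on 127.0.0.1:8798 when the kubelet issued its liveness GET; the entries that follow show the probe failing again and the container being killed and restarted. For reference, a minimal sketch of an HTTP target that a probe of this shape would accept (any 2xx/3xx status passes); the handler is hypothetical, standing in for whatever the machine-config-daemon actually serves at /health:

```go
// Hypothetical stand-in for the probed endpoint, not machine-config-daemon
// code: a liveness probe like the one in the log passes as long as a GET to
// 127.0.0.1:8798/health returns a 2xx or 3xx status before its timeout.
package main

import (
	"log"
	"net/http"
)

func main() {
	http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK) // 200: kubelet counts this probe as a success
	})
	// The address and port the kubelet probes in the entries above. If this
	// listener is down, the probe fails with "connect: connection refused".
	log.Fatal(http.ListenAndServe("127.0.0.1:8798", nil))
}
```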
Dec 10 07:14:04 crc kubenswrapper[4765]: I1210 07:14:04.049339 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 07:14:04 crc kubenswrapper[4765]: I1210 07:14:04.049943 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 07:14:04 crc kubenswrapper[4765]: I1210 07:14:04.049997 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w"
Dec 10 07:14:04 crc kubenswrapper[4765]: I1210 07:14:04.050725 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 10 07:14:04 crc kubenswrapper[4765]: I1210 07:14:04.050786 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" gracePeriod=600
Dec 10 07:14:04 crc kubenswrapper[4765]: E1210 07:14:04.175827 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:14:04 crc kubenswrapper[4765]: I1210 07:14:04.626614 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" exitCode=0
Dec 10 07:14:04 crc kubenswrapper[4765]: I1210 07:14:04.626688 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"}
Dec 10 07:14:04 crc kubenswrapper[4765]: I1210 07:14:04.626783 4765 scope.go:117] "RemoveContainer" containerID="1c86e2badb0bfc9f0eb664aadfc0af5a709c20f5327fa62e0a9911a7da8c407c"
Dec 10 07:14:04 crc kubenswrapper[4765]: I1210 07:14:04.627543 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:14:04 crc kubenswrapper[4765]: E1210 07:14:04.627822 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.805535 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vbpr7"]
Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806216 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerName="neutron-httpd"
Dec 10 07:14:06 crc kubenswrapper[4765]: I1210
07:14:06.806233 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerName="neutron-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806254 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b62b966-7b0a-4099-977c-44682f703187" containerName="ovn-northd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806262 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b62b966-7b0a-4099-977c-44682f703187" containerName="ovn-northd" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806280 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="sg-core" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806289 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="sg-core" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806299 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea4a2d0b-62e8-4527-948f-9f9c76070af1" containerName="init" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806306 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea4a2d0b-62e8-4527-948f-9f9c76070af1" containerName="init" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806316 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="857ecb8b-72dc-40a7-a407-d85c40c40bcf" containerName="keystone-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806323 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="857ecb8b-72dc-40a7-a407-d85c40c40bcf" containerName="keystone-api" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806335 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-auditor" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806343 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-auditor" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806355 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerName="placement-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806362 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerName="placement-api" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806374 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-replicator" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806381 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-replicator" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806395 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f429f6df-3510-4c6a-b9e8-062895218832" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806401 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f429f6df-3510-4c6a-b9e8-062895218832" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806418 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="949ef1d3-9f74-4052-a482-9fea4e48d374" containerName="mysql-bootstrap" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806425 4765 
state_mem.go:107] "Deleted CPUSet assignment" podUID="949ef1d3-9f74-4052-a482-9fea4e48d374" containerName="mysql-bootstrap" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806440 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806449 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806456 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-server" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806464 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-server" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806476 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-server" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806483 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-server" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806502 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="202a617e-eb55-4702-8958-3502b6d8e91b" containerName="cinder-api-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806509 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="202a617e-eb55-4702-8958-3502b6d8e91b" containerName="cinder-api-log" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806518 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806525 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806533 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="865e0a69-0d85-4d93-9d38-f52449d09d87" containerName="barbican-worker-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806539 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="865e0a69-0d85-4d93-9d38-f52449d09d87" containerName="barbican-worker-log" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806551 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovs-vswitchd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806558 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovs-vswitchd" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806574 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806581 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806594 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5b765ff-9a5a-48b8-80c7-e8bd56613fcc" containerName="mariadb-account-delete" Dec 10 
07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806603 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5b765ff-9a5a-48b8-80c7-e8bd56613fcc" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806612 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" containerName="setup-container" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806619 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" containerName="setup-container" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806629 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" containerName="cinder-scheduler" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806636 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" containerName="cinder-scheduler" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806645 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6c300c8-c9c7-40c3-8874-236b21eb4856" containerName="kube-state-metrics" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806652 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6c300c8-c9c7-40c3-8874-236b21eb4856" containerName="kube-state-metrics" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806664 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48199101-c7d2-4881-98bd-53d14d7308d5" containerName="glance-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806672 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="48199101-c7d2-4881-98bd-53d14d7308d5" containerName="glance-log" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806684 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bbedff0-5b89-4bbb-b308-6ccb13c8216c" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806692 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bbedff0-5b89-4bbb-b308-6ccb13c8216c" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806706 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="416ec9f7-82f7-4eb1-a936-51038c6da878" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806714 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="416ec9f7-82f7-4eb1-a936-51038c6da878" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806724 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1099ee9-e4d7-496f-b35e-7617ee456898" containerName="glance-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806731 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1099ee9-e4d7-496f-b35e-7617ee456898" containerName="glance-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806743 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea4a2d0b-62e8-4527-948f-9f9c76070af1" containerName="dnsmasq-dns" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806751 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea4a2d0b-62e8-4527-948f-9f9c76070af1" containerName="dnsmasq-dns" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806765 4765 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4b62b966-7b0a-4099-977c-44682f703187" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806772 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b62b966-7b0a-4099-977c-44682f703187" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806781 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d11b24c2-0ac0-4f23-a575-d1b80db4ba11" containerName="nova-scheduler-scheduler" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806788 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d11b24c2-0ac0-4f23-a575-d1b80db4ba11" containerName="nova-scheduler-scheduler" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806817 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806825 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806837 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerName="barbican-api-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806844 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerName="barbican-api-log" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806857 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerName="proxy-server" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806864 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerName="proxy-server" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806877 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-reaper" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806884 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-reaper" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806893 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-replicator" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806900 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-replicator" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806912 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-replicator" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806919 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-replicator" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806929 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48199101-c7d2-4881-98bd-53d14d7308d5" containerName="glance-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806936 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="48199101-c7d2-4881-98bd-53d14d7308d5" containerName="glance-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806945 
4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dbd0d4a-660a-4887-83ae-25c00f54196a" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806952 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dbd0d4a-660a-4887-83ae-25c00f54196a" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806962 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerName="placement-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806969 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerName="placement-log" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806978 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="949ef1d3-9f74-4052-a482-9fea4e48d374" containerName="galera" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.806985 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="949ef1d3-9f74-4052-a482-9fea4e48d374" containerName="galera" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.806994 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9af12be-6bc5-4aa8-bb84-135e3c0727cb" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807003 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9af12be-6bc5-4aa8-bb84-135e3c0727cb" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807014 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7694f523-adf7-4964-b475-6cd94cac7d75" containerName="nova-cell1-conductor-conductor" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807022 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="7694f523-adf7-4964-b475-6cd94cac7d75" containerName="nova-cell1-conductor-conductor" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807033 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerName="nova-api-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807040 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerName="nova-api-log" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807052 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-auditor" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807059 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-auditor" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807073 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="swift-recon-cron" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807080 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="swift-recon-cron" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807110 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerName="barbican-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807119 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerName="barbican-api" 
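The long run of paired cpu_manager.go / state_mem.go entries above, which continues below, is the kubelet's resource managers discarding per-container bookkeeping for pods that no longer exist on this node (the UIDs match the pods torn down earlier in the log). A minimal sketch of the underlying pattern, with made-up types rather than the kubelet's real state packages: walk a state map keyed by pod UID and container name, and drop every entry whose pod is not in the active set:

```go
// Illustrative sketch of stale-state removal, not the kubelet's cpu_manager:
// assignments for containers of pods that are no longer active are deleted
// from the manager's state map, mirroring the paired "RemoveStaleState:
// removing container" / "Deleted CPUSet assignment" entries in the log.
package main

import "fmt"

type key struct{ podUID, container string }

func removeStaleState(state map[key]string, activePods map[string]bool) {
	for k := range state { // deleting during range is safe in Go
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.podUID, k.container)
			delete(state, k)
		}
	}
}

func main() {
	// Pod UIDs borrowed from the log; the CPUSet strings are invented.
	state := map[key]string{
		{"4e9d4a75-10e4-46dd-9180-821c917a2b5e", "neutron-httpd"}:   "0-3",
		{"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2", "registry-server"}: "0-3",
	}
	active := map[string]bool{"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2": true}
	removeStaleState(state, active) // only the stale neutron-httpd entry is dropped
}
```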
Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807131 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-server" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807139 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-server" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807152 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c417c995-d247-48fe-afea-472a698e27f4" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807159 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c417c995-d247-48fe-afea-472a698e27f4" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807173 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807180 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-log" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807191 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="rsync" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807198 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="rsync" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807209 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server-init" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807217 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server-init" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807226 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="ceilometer-central-agent" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807234 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="ceilometer-central-agent" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807266 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" containerName="rabbitmq" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807273 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" containerName="rabbitmq" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807283 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="367a49cf-488a-4852-8728-78dacbfbd500" containerName="barbican-keystone-listener-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807291 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="367a49cf-488a-4852-8728-78dacbfbd500" containerName="barbican-keystone-listener-log" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807301 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="proxy-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807307 4765 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="proxy-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807313 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="865e0a69-0d85-4d93-9d38-f52449d09d87" containerName="barbican-worker" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807318 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="865e0a69-0d85-4d93-9d38-f52449d09d87" containerName="barbican-worker" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807328 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="367a49cf-488a-4852-8728-78dacbfbd500" containerName="barbican-keystone-listener" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807335 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="367a49cf-488a-4852-8728-78dacbfbd500" containerName="barbican-keystone-listener" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807344 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-updater" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807349 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-updater" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807359 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-expirer" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807365 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-expirer" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807374 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fec9b72-da40-4d40-9f56-5eca02badaba" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807380 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fec9b72-da40-4d40-9f56-5eca02badaba" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807390 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="ceilometer-notification-agent" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807398 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="ceilometer-notification-agent" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807407 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27b317a6-1f99-4951-a064-e8ca8a38dc94" containerName="mysql-bootstrap" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807414 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="27b317a6-1f99-4951-a064-e8ca8a38dc94" containerName="mysql-bootstrap" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807423 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" containerName="ovsdbserver-nb" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807430 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" containerName="ovsdbserver-nb" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807439 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerName="neutron-api" Dec 10 07:14:06 crc kubenswrapper[4765]: 
I1210 07:14:06.807446 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerName="neutron-api" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807456 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-auditor" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807463 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-auditor" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807472 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" containerName="rabbitmq" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807480 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" containerName="rabbitmq" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807491 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-updater" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807499 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-updater" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807509 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807516 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807527 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerName="nova-api-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807534 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerName="nova-api-api" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807542 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03838926-8208-43dc-9bfd-6af312a938a4" containerName="nova-cell0-conductor-conductor" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807549 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="03838926-8208-43dc-9bfd-6af312a938a4" containerName="nova-cell0-conductor-conductor" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807563 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" containerName="probe" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807571 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" containerName="probe" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807583 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerName="proxy-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807592 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerName="proxy-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807603 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="202a617e-eb55-4702-8958-3502b6d8e91b" containerName="cinder-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 
07:14:06.807610 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="202a617e-eb55-4702-8958-3502b6d8e91b" containerName="cinder-api" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807618 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1099ee9-e4d7-496f-b35e-7617ee456898" containerName="glance-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807625 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1099ee9-e4d7-496f-b35e-7617ee456898" containerName="glance-log" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807635 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27b317a6-1f99-4951-a064-e8ca8a38dc94" containerName="galera" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807645 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="27b317a6-1f99-4951-a064-e8ca8a38dc94" containerName="galera" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807655 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" containerName="ovsdbserver-sb" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807662 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" containerName="ovsdbserver-sb" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807674 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655c5f62-880e-4e05-9db8-da19844facdf" containerName="memcached" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807681 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="655c5f62-880e-4e05-9db8-da19844facdf" containerName="memcached" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807691 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807698 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807711 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" containerName="setup-container" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807717 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" containerName="setup-container" Dec 10 07:14:06 crc kubenswrapper[4765]: E1210 07:14:06.807728 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-metadata" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807736 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-metadata" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807944 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerName="placement-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807962 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="rsync" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807972 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-replicator" Dec 10 07:14:06 crc kubenswrapper[4765]: 
I1210 07:14:06.807983 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerName="nova-api-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.807992 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" containerName="cinder-scheduler" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808006 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="4406d4e6-b2a9-4e81-9672-b54775fad3bb" containerName="probe" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808015 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808029 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="865e0a69-0d85-4d93-9d38-f52449d09d87" containerName="barbican-worker-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808042 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1099ee9-e4d7-496f-b35e-7617ee456898" containerName="glance-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808052 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-auditor" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808063 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovsdb-server" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808071 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="202a617e-eb55-4702-8958-3502b6d8e91b" containerName="cinder-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808105 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cbf2f96-d196-413b-841a-9b753e6beae2" containerName="placement-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808119 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c417c995-d247-48fe-afea-472a698e27f4" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808132 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba3c2b8a-ad6f-4a4b-8d8d-0d27120b95d4" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808146 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="655c5f62-880e-4e05-9db8-da19844facdf" containerName="memcached" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808159 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-expirer" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808167 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="367a49cf-488a-4852-8728-78dacbfbd500" containerName="barbican-keystone-listener" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808178 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="proxy-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808189 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="48199101-c7d2-4881-98bd-53d14d7308d5" containerName="glance-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808198 4765 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d11b24c2-0ac0-4f23-a575-d1b80db4ba11" containerName="nova-scheduler-scheduler" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808207 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="949ef1d3-9f74-4052-a482-9fea4e48d374" containerName="galera" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808215 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808228 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="367a49cf-488a-4852-8728-78dacbfbd500" containerName="barbican-keystone-listener-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808240 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b62b966-7b0a-4099-977c-44682f703187" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808253 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b62b966-7b0a-4099-977c-44682f703187" containerName="ovn-northd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808263 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808273 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dbd0d4a-660a-4887-83ae-25c00f54196a" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808280 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f429f6df-3510-4c6a-b9e8-062895218832" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808293 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="202a617e-eb55-4702-8958-3502b6d8e91b" containerName="cinder-api-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808306 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="ceilometer-notification-agent" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808316 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-server" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808326 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b2c99d6-f2e1-4c1c-8825-e8c62d00d133" containerName="ovn-controller" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808394 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerName="barbican-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808405 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea4a2d0b-62e8-4527-948f-9f9c76070af1" containerName="dnsmasq-dns" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808411 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0befa24-0eda-4f25-9f15-bfb0ebb74e1e" containerName="ovsdbserver-sb" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808423 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerName="neutron-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808436 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fec9b72-da40-4d40-9f56-5eca02badaba" 
containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808443 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="78b416b3-3796-4fa3-8a4f-7fa6107d98a1" containerName="rabbitmq" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808453 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e9d4a75-10e4-46dd-9180-821c917a2b5e" containerName="neutron-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808465 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808475 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerName="proxy-server" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808487 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb882c24-ec9a-4e19-99ac-b6f96c420cb5" containerName="ovsdbserver-nb" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808495 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="ceilometer-central-agent" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808504 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a99712e9-cab7-452c-9df1-d94b5c4d96af" containerName="barbican-api-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808511 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="416ec9f7-82f7-4eb1-a936-51038c6da878" containerName="openstack-network-exporter" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808519 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-server" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808532 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-auditor" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808542 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="27b317a6-1f99-4951-a064-e8ca8a38dc94" containerName="galera" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808553 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-updater" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808564 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-replicator" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808576 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d035962-836c-48cf-8ea4-a3e5a23f58f9" containerName="rabbitmq" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808589 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="7694f523-adf7-4964-b475-6cd94cac7d75" containerName="nova-cell1-conductor-conductor" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808600 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-auditor" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808608 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6c300c8-c9c7-40c3-8874-236b21eb4856" containerName="kube-state-metrics" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 
07:14:06.808619 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="container-server" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808630 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="swift-recon-cron" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808642 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="865e0a69-0d85-4d93-9d38-f52449d09d87" containerName="barbican-worker" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808653 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a5f12df-98c4-4ab8-af81-e1b8f2067fcc" containerName="nova-metadata-metadata" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808662 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5b765ff-9a5a-48b8-80c7-e8bd56613fcc" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808674 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="857ecb8b-72dc-40a7-a407-d85c40c40bcf" containerName="keystone-api" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808684 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="03838926-8208-43dc-9bfd-6af312a938a4" containerName="nova-cell0-conductor-conductor" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808693 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="efa8eaec-19fb-43da-a1f3-557b0847e966" containerName="nova-api-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808704 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-replicator" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808714 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfc6fea8-973e-42c9-9482-a4853abec6c1" containerName="proxy-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808727 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8847d4d-d633-4ca3-90fa-3384e525864d" containerName="sg-core" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808737 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="48199101-c7d2-4881-98bd-53d14d7308d5" containerName="glance-log" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808744 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1099ee9-e4d7-496f-b35e-7617ee456898" containerName="glance-httpd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808791 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9af12be-6bc5-4aa8-bb84-135e3c0727cb" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808802 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="object-updater" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808814 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9fa88d7-1f7a-4cd1-9bee-8823f5fb4a86" containerName="account-reaper" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808825 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bbedff0-5b89-4bbb-b308-6ccb13c8216c" containerName="mariadb-account-delete" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.808838 4765 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="209844a2-e0ac-447f-99f6-28cd864ca648" containerName="ovs-vswitchd" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.810136 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.826656 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vbpr7"] Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.986201 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-catalog-content\") pod \"community-operators-vbpr7\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.986276 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-utilities\") pod \"community-operators-vbpr7\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:06 crc kubenswrapper[4765]: I1210 07:14:06.986645 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79tjb\" (UniqueName: \"kubernetes.io/projected/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-kube-api-access-79tjb\") pod \"community-operators-vbpr7\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:07 crc kubenswrapper[4765]: I1210 07:14:07.088313 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79tjb\" (UniqueName: \"kubernetes.io/projected/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-kube-api-access-79tjb\") pod \"community-operators-vbpr7\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:07 crc kubenswrapper[4765]: I1210 07:14:07.088399 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-catalog-content\") pod \"community-operators-vbpr7\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:07 crc kubenswrapper[4765]: I1210 07:14:07.088423 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-utilities\") pod \"community-operators-vbpr7\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:07 crc kubenswrapper[4765]: I1210 07:14:07.089013 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-utilities\") pod \"community-operators-vbpr7\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:07 crc kubenswrapper[4765]: I1210 07:14:07.089310 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-catalog-content\") pod \"community-operators-vbpr7\" (UID: 
\"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " pod="openshift-marketplace/community-operators-vbpr7"
Dec 10 07:14:07 crc kubenswrapper[4765]: I1210 07:14:07.108610 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79tjb\" (UniqueName: \"kubernetes.io/projected/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-kube-api-access-79tjb\") pod \"community-operators-vbpr7\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " pod="openshift-marketplace/community-operators-vbpr7"
Dec 10 07:14:07 crc kubenswrapper[4765]: I1210 07:14:07.131276 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vbpr7"
Dec 10 07:14:07 crc kubenswrapper[4765]: I1210 07:14:07.690995 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vbpr7"]
Dec 10 07:14:08 crc kubenswrapper[4765]: I1210 07:14:08.660450 4765 generic.go:334] "Generic (PLEG): container finished" podID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerID="8ac5bf4dabf8177de61dacebcca38f50cd5a1d5934443dfe73ea6fc096bf6e66" exitCode=0
Dec 10 07:14:08 crc kubenswrapper[4765]: I1210 07:14:08.660548 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbpr7" event={"ID":"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2","Type":"ContainerDied","Data":"8ac5bf4dabf8177de61dacebcca38f50cd5a1d5934443dfe73ea6fc096bf6e66"}
Dec 10 07:14:08 crc kubenswrapper[4765]: I1210 07:14:08.660785 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbpr7" event={"ID":"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2","Type":"ContainerStarted","Data":"3eecfa44e498705fb5c6ee1dbf692dda8a849ba9b0c2fc978baa34a69e7fdc5d"}
Dec 10 07:14:09 crc kubenswrapper[4765]: I1210 07:14:09.886948 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbpr7" event={"ID":"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2","Type":"ContainerStarted","Data":"611a3589e9fd1b866ed1d89547bfa7315863ca427d9d506d05ada14c969909ff"}
Dec 10 07:14:10 crc kubenswrapper[4765]: I1210 07:14:10.898398 4765 generic.go:334] "Generic (PLEG): container finished" podID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerID="611a3589e9fd1b866ed1d89547bfa7315863ca427d9d506d05ada14c969909ff" exitCode=0
Dec 10 07:14:10 crc kubenswrapper[4765]: I1210 07:14:10.898445 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbpr7" event={"ID":"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2","Type":"ContainerDied","Data":"611a3589e9fd1b866ed1d89547bfa7315863ca427d9d506d05ada14c969909ff"}
Dec 10 07:14:11 crc kubenswrapper[4765]: I1210 07:14:11.910466 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbpr7" event={"ID":"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2","Type":"ContainerStarted","Data":"9486191d5d37b3282820b979c58cbc72b9854272f1c86797f2f7711583dafc3d"}
Dec 10 07:14:11 crc kubenswrapper[4765]: I1210 07:14:11.930492 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vbpr7" podStartSLOduration=3.22099134 podStartE2EDuration="5.930470928s" podCreationTimestamp="2025-12-10 07:14:06 +0000 UTC" firstStartedPulling="2025-12-10 07:14:08.662164244 +0000 UTC m=+1568.388829560" lastFinishedPulling="2025-12-10 07:14:11.371643842 +0000 UTC m=+1571.098309148" observedRunningTime="2025-12-10 07:14:11.929052017 +0000 UTC m=+1571.655717333" watchObservedRunningTime="2025-12-10 07:14:11.930470928 +0000 UTC m=+1571.657136244"
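The pod_startup_latency_tracker entry above is internally consistent: using the monotonic m=+ offsets, the image pull took lastFinishedPulling − firstStartedPulling = 1571.098309148 − 1568.388829560 ≈ 2.709479588s, and subtracting that from podStartE2EDuration (5.930470928s, i.e. watchObservedRunningTime minus the 07:14:06 podCreationTimestamp) reproduces the reported podStartSLOduration of 3.22099134, which is consistent with the SLO figure excluding image-pull time. A quick check of that arithmetic, assuming only the relationship the logged numbers themselves bear out:

```go
// Recomputes the tracker's figures from the timestamps logged above, using
// the monotonic m=+ offsets. Assumes only the relationship the numbers
// confirm: SLO duration = E2E duration - image pull time.
package main

import "fmt"

func main() {
	const (
		firstStartedPulling = 1568.388829560 // m=+ offset, seconds
		lastFinishedPulling = 1571.098309148
		podStartE2EDuration = 5.930470928 // watchObservedRunningTime - podCreationTimestamp
	)
	pull := lastFinishedPulling - firstStartedPulling
	fmt.Printf("image pull:   %.9fs\n", pull)                       // 2.709479588s
	fmt.Printf("SLO duration: %.8fs\n", podStartE2EDuration-pull)   // 3.22099134s, as logged
}
```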
watchObservedRunningTime="2025-12-10 07:14:11.930470928 +0000 UTC m=+1571.657136244" Dec 10 07:14:17 crc kubenswrapper[4765]: I1210 07:14:17.132268 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:17 crc kubenswrapper[4765]: I1210 07:14:17.132791 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:17 crc kubenswrapper[4765]: I1210 07:14:17.202890 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:17 crc kubenswrapper[4765]: I1210 07:14:17.588820 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:14:17 crc kubenswrapper[4765]: E1210 07:14:17.589386 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:14:17 crc kubenswrapper[4765]: I1210 07:14:17.999371 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:18 crc kubenswrapper[4765]: I1210 07:14:18.042623 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vbpr7"] Dec 10 07:14:19 crc kubenswrapper[4765]: I1210 07:14:19.977374 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vbpr7" podUID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerName="registry-server" containerID="cri-o://9486191d5d37b3282820b979c58cbc72b9854272f1c86797f2f7711583dafc3d" gracePeriod=2 Dec 10 07:14:20 crc kubenswrapper[4765]: I1210 07:14:20.989224 4765 generic.go:334] "Generic (PLEG): container finished" podID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerID="9486191d5d37b3282820b979c58cbc72b9854272f1c86797f2f7711583dafc3d" exitCode=0 Dec 10 07:14:20 crc kubenswrapper[4765]: I1210 07:14:20.989293 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbpr7" event={"ID":"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2","Type":"ContainerDied","Data":"9486191d5d37b3282820b979c58cbc72b9854272f1c86797f2f7711583dafc3d"} Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.579801 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.699789 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-utilities\") pod \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.699910 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79tjb\" (UniqueName: \"kubernetes.io/projected/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-kube-api-access-79tjb\") pod \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.699973 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-catalog-content\") pod \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\" (UID: \"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2\") " Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.701843 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-utilities" (OuterVolumeSpecName: "utilities") pod "08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" (UID: "08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.705826 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-kube-api-access-79tjb" (OuterVolumeSpecName: "kube-api-access-79tjb") pod "08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" (UID: "08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2"). InnerVolumeSpecName "kube-api-access-79tjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.757558 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" (UID: "08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.803996 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.804389 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.804517 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79tjb\" (UniqueName: \"kubernetes.io/projected/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2-kube-api-access-79tjb\") on node \"crc\" DevicePath \"\"" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.846298 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lvmbk"] Dec 10 07:14:21 crc kubenswrapper[4765]: E1210 07:14:21.846706 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerName="extract-content" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.846720 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerName="extract-content" Dec 10 07:14:21 crc kubenswrapper[4765]: E1210 07:14:21.846730 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerName="registry-server" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.846737 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerName="registry-server" Dec 10 07:14:21 crc kubenswrapper[4765]: E1210 07:14:21.846763 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerName="extract-utilities" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.846770 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerName="extract-utilities" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.846934 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" containerName="registry-server" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.848039 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.855813 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lvmbk"] Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.914478 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcbf5\" (UniqueName: \"kubernetes.io/projected/6190a598-1dcf-4923-8505-c64348a7ae65-kube-api-access-xcbf5\") pod \"certified-operators-lvmbk\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.914841 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-catalog-content\") pod \"certified-operators-lvmbk\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:21 crc kubenswrapper[4765]: I1210 07:14:21.917779 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-utilities\") pod \"certified-operators-lvmbk\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.001372 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vbpr7" event={"ID":"08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2","Type":"ContainerDied","Data":"3eecfa44e498705fb5c6ee1dbf692dda8a849ba9b0c2fc978baa34a69e7fdc5d"} Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.001445 4765 scope.go:117] "RemoveContainer" containerID="9486191d5d37b3282820b979c58cbc72b9854272f1c86797f2f7711583dafc3d" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.001451 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vbpr7" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.022072 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcbf5\" (UniqueName: \"kubernetes.io/projected/6190a598-1dcf-4923-8505-c64348a7ae65-kube-api-access-xcbf5\") pod \"certified-operators-lvmbk\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.022406 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-catalog-content\") pod \"certified-operators-lvmbk\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.022588 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-utilities\") pod \"certified-operators-lvmbk\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.025565 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-catalog-content\") pod \"certified-operators-lvmbk\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.025999 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-utilities\") pod \"certified-operators-lvmbk\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.026115 4765 scope.go:117] "RemoveContainer" containerID="611a3589e9fd1b866ed1d89547bfa7315863ca427d9d506d05ada14c969909ff" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.044747 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vbpr7"] Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.047041 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcbf5\" (UniqueName: \"kubernetes.io/projected/6190a598-1dcf-4923-8505-c64348a7ae65-kube-api-access-xcbf5\") pod \"certified-operators-lvmbk\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.051470 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vbpr7"] Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.067000 4765 scope.go:117] "RemoveContainer" containerID="8ac5bf4dabf8177de61dacebcca38f50cd5a1d5934443dfe73ea6fc096bf6e66" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.199310 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.600423 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2" path="/var/lib/kubelet/pods/08a3e703-d2fb-4d7d-ae10-bd3dd2d8b3f2/volumes" Dec 10 07:14:22 crc kubenswrapper[4765]: I1210 07:14:22.943870 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lvmbk"] Dec 10 07:14:23 crc kubenswrapper[4765]: I1210 07:14:23.021113 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lvmbk" event={"ID":"6190a598-1dcf-4923-8505-c64348a7ae65","Type":"ContainerStarted","Data":"10a05dc7ee2094f53408722285444d806bce9a629f2670fd9eac5571451bb258"} Dec 10 07:14:24 crc kubenswrapper[4765]: I1210 07:14:24.031127 4765 generic.go:334] "Generic (PLEG): container finished" podID="6190a598-1dcf-4923-8505-c64348a7ae65" containerID="a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264" exitCode=0 Dec 10 07:14:24 crc kubenswrapper[4765]: I1210 07:14:24.031180 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lvmbk" event={"ID":"6190a598-1dcf-4923-8505-c64348a7ae65","Type":"ContainerDied","Data":"a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264"} Dec 10 07:14:25 crc kubenswrapper[4765]: I1210 07:14:25.044111 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lvmbk" event={"ID":"6190a598-1dcf-4923-8505-c64348a7ae65","Type":"ContainerStarted","Data":"8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91"} Dec 10 07:14:26 crc kubenswrapper[4765]: I1210 07:14:26.054179 4765 generic.go:334] "Generic (PLEG): container finished" podID="6190a598-1dcf-4923-8505-c64348a7ae65" containerID="8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91" exitCode=0 Dec 10 07:14:26 crc kubenswrapper[4765]: I1210 07:14:26.054225 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lvmbk" event={"ID":"6190a598-1dcf-4923-8505-c64348a7ae65","Type":"ContainerDied","Data":"8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91"} Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.167872 4765 scope.go:117] "RemoveContainer" containerID="8f8cddb2be18a038febcc143f2e0f10989133f18e4472abe5e52335b05baf50b" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.191078 4765 scope.go:117] "RemoveContainer" containerID="77ab2e0016b06d132a4b5d1f9e9e65250792bd9f6a7224bc5a0f2c0925f0b66e" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.225538 4765 scope.go:117] "RemoveContainer" containerID="f860e2778014e1dafda7ef1de8ff21722953addca2ab6e1293384fc997df2228" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.273631 4765 scope.go:117] "RemoveContainer" containerID="820d4c5babe9e47d26c867a774b3593a3faf3b4ab2eb66e4d0a76c25b022d470" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.310342 4765 scope.go:117] "RemoveContainer" containerID="7142c04cc4e57c825e867cdb342a3a40fc9b114254c516d77632e8e8eeccce3b" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.333935 4765 scope.go:117] "RemoveContainer" containerID="55dfae8913f307f4ff17dd9b36b7ab88c420206a1add647e31c248ea0b272699" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.361477 4765 scope.go:117] "RemoveContainer" 
containerID="ae0acddf4b81b4ef12582dae3c37d0db2b9a5b857d5bfe7687182a89daab11ff" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.404834 4765 scope.go:117] "RemoveContainer" containerID="8883731c27445d441db7a1d384e0ba69d165eb04c1a3efec00afc4870eeec3e5" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.433830 4765 scope.go:117] "RemoveContainer" containerID="eb6633a896afa14ec87a70d84b7673bb68759a50c37aebfb43aa25d44e6ec694" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.458193 4765 scope.go:117] "RemoveContainer" containerID="0b9385d3a2b472129ce31b997095c3d9e59316e9f7c4dab74278e1f6448b2654" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.483144 4765 scope.go:117] "RemoveContainer" containerID="fa5034942a101db0b3408988ffd5a22073166bac86fe65bff4aae4090676f49b" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.504548 4765 scope.go:117] "RemoveContainer" containerID="0646d1ce4bef47ba9bf9033b093dfa3beee93176dafb3db9996b42d2069ba6ef" Dec 10 07:14:27 crc kubenswrapper[4765]: I1210 07:14:27.523075 4765 scope.go:117] "RemoveContainer" containerID="0720be3be9097ccf18ece7dc254d8edf91ea794a7f29bb4e275248ca1c0e9261" Dec 10 07:14:29 crc kubenswrapper[4765]: I1210 07:14:29.150629 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lvmbk" event={"ID":"6190a598-1dcf-4923-8505-c64348a7ae65","Type":"ContainerStarted","Data":"27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205"} Dec 10 07:14:29 crc kubenswrapper[4765]: I1210 07:14:29.180260 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lvmbk" podStartSLOduration=4.203834006 podStartE2EDuration="8.180242129s" podCreationTimestamp="2025-12-10 07:14:21 +0000 UTC" firstStartedPulling="2025-12-10 07:14:24.033390204 +0000 UTC m=+1583.760055520" lastFinishedPulling="2025-12-10 07:14:28.009798327 +0000 UTC m=+1587.736463643" observedRunningTime="2025-12-10 07:14:29.171755488 +0000 UTC m=+1588.898420804" watchObservedRunningTime="2025-12-10 07:14:29.180242129 +0000 UTC m=+1588.906907445" Dec 10 07:14:30 crc kubenswrapper[4765]: I1210 07:14:30.593658 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:14:30 crc kubenswrapper[4765]: E1210 07:14:30.593934 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:14:32 crc kubenswrapper[4765]: I1210 07:14:32.200948 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:32 crc kubenswrapper[4765]: I1210 07:14:32.201321 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:32 crc kubenswrapper[4765]: I1210 07:14:32.245720 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:33 crc kubenswrapper[4765]: I1210 07:14:33.233597 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 
07:14:33 crc kubenswrapper[4765]: I1210 07:14:33.282451 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lvmbk"] Dec 10 07:14:35 crc kubenswrapper[4765]: I1210 07:14:35.198505 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lvmbk" podUID="6190a598-1dcf-4923-8505-c64348a7ae65" containerName="registry-server" containerID="cri-o://27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205" gracePeriod=2 Dec 10 07:14:36 crc kubenswrapper[4765]: I1210 07:14:36.798671 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:36 crc kubenswrapper[4765]: I1210 07:14:36.949796 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcbf5\" (UniqueName: \"kubernetes.io/projected/6190a598-1dcf-4923-8505-c64348a7ae65-kube-api-access-xcbf5\") pod \"6190a598-1dcf-4923-8505-c64348a7ae65\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " Dec 10 07:14:36 crc kubenswrapper[4765]: I1210 07:14:36.950014 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-catalog-content\") pod \"6190a598-1dcf-4923-8505-c64348a7ae65\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " Dec 10 07:14:36 crc kubenswrapper[4765]: I1210 07:14:36.950123 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-utilities\") pod \"6190a598-1dcf-4923-8505-c64348a7ae65\" (UID: \"6190a598-1dcf-4923-8505-c64348a7ae65\") " Dec 10 07:14:36 crc kubenswrapper[4765]: I1210 07:14:36.950941 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-utilities" (OuterVolumeSpecName: "utilities") pod "6190a598-1dcf-4923-8505-c64348a7ae65" (UID: "6190a598-1dcf-4923-8505-c64348a7ae65"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:14:36 crc kubenswrapper[4765]: I1210 07:14:36.961784 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6190a598-1dcf-4923-8505-c64348a7ae65-kube-api-access-xcbf5" (OuterVolumeSpecName: "kube-api-access-xcbf5") pod "6190a598-1dcf-4923-8505-c64348a7ae65" (UID: "6190a598-1dcf-4923-8505-c64348a7ae65"). InnerVolumeSpecName "kube-api-access-xcbf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.000586 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6190a598-1dcf-4923-8505-c64348a7ae65" (UID: "6190a598-1dcf-4923-8505-c64348a7ae65"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.052169 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.052196 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6190a598-1dcf-4923-8505-c64348a7ae65-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.052207 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcbf5\" (UniqueName: \"kubernetes.io/projected/6190a598-1dcf-4923-8505-c64348a7ae65-kube-api-access-xcbf5\") on node \"crc\" DevicePath \"\"" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.227648 4765 generic.go:334] "Generic (PLEG): container finished" podID="6190a598-1dcf-4923-8505-c64348a7ae65" containerID="27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205" exitCode=0 Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.227721 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lvmbk" event={"ID":"6190a598-1dcf-4923-8505-c64348a7ae65","Type":"ContainerDied","Data":"27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205"} Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.227790 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lvmbk" event={"ID":"6190a598-1dcf-4923-8505-c64348a7ae65","Type":"ContainerDied","Data":"10a05dc7ee2094f53408722285444d806bce9a629f2670fd9eac5571451bb258"} Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.227815 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lvmbk" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.227838 4765 scope.go:117] "RemoveContainer" containerID="27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.249613 4765 scope.go:117] "RemoveContainer" containerID="8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.269287 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lvmbk"] Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.272200 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lvmbk"] Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.297514 4765 scope.go:117] "RemoveContainer" containerID="a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.317242 4765 scope.go:117] "RemoveContainer" containerID="27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205" Dec 10 07:14:37 crc kubenswrapper[4765]: E1210 07:14:37.317740 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205\": container with ID starting with 27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205 not found: ID does not exist" containerID="27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.317801 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205"} err="failed to get container status \"27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205\": rpc error: code = NotFound desc = could not find container \"27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205\": container with ID starting with 27dc7e9f07618950e5e915bdb68f3d078a79a8244fdc63cd491782089d91b205 not found: ID does not exist" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.317832 4765 scope.go:117] "RemoveContainer" containerID="8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91" Dec 10 07:14:37 crc kubenswrapper[4765]: E1210 07:14:37.318187 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91\": container with ID starting with 8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91 not found: ID does not exist" containerID="8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.318237 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91"} err="failed to get container status \"8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91\": rpc error: code = NotFound desc = could not find container \"8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91\": container with ID starting with 8e757ec8f41224a60ea9f1e3951e0185b7d104f7114bb2edbb495d6d2a2e5e91 not found: ID does not exist" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.318277 4765 scope.go:117] "RemoveContainer" 
containerID="a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264" Dec 10 07:14:37 crc kubenswrapper[4765]: E1210 07:14:37.318831 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264\": container with ID starting with a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264 not found: ID does not exist" containerID="a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264" Dec 10 07:14:37 crc kubenswrapper[4765]: I1210 07:14:37.318869 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264"} err="failed to get container status \"a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264\": rpc error: code = NotFound desc = could not find container \"a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264\": container with ID starting with a597ea8997978aa20e07eaa65cda7620293878fd701582018261b02e9d3ba264 not found: ID does not exist" Dec 10 07:14:38 crc kubenswrapper[4765]: I1210 07:14:38.598988 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6190a598-1dcf-4923-8505-c64348a7ae65" path="/var/lib/kubelet/pods/6190a598-1dcf-4923-8505-c64348a7ae65/volumes" Dec 10 07:14:43 crc kubenswrapper[4765]: I1210 07:14:43.588729 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:14:43 crc kubenswrapper[4765]: E1210 07:14:43.589077 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:14:58 crc kubenswrapper[4765]: I1210 07:14:58.589392 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:14:58 crc kubenswrapper[4765]: E1210 07:14:58.590249 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.214719 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx"] Dec 10 07:15:00 crc kubenswrapper[4765]: E1210 07:15:00.215124 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6190a598-1dcf-4923-8505-c64348a7ae65" containerName="extract-content" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.215138 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="6190a598-1dcf-4923-8505-c64348a7ae65" containerName="extract-content" Dec 10 07:15:00 crc kubenswrapper[4765]: E1210 07:15:00.215161 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6190a598-1dcf-4923-8505-c64348a7ae65" containerName="extract-utilities" Dec 10 07:15:00 crc 
kubenswrapper[4765]: I1210 07:15:00.215167 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="6190a598-1dcf-4923-8505-c64348a7ae65" containerName="extract-utilities" Dec 10 07:15:00 crc kubenswrapper[4765]: E1210 07:15:00.215183 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6190a598-1dcf-4923-8505-c64348a7ae65" containerName="registry-server" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.215198 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="6190a598-1dcf-4923-8505-c64348a7ae65" containerName="registry-server" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.215399 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="6190a598-1dcf-4923-8505-c64348a7ae65" containerName="registry-server" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.216065 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.218459 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.218541 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.233309 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx"] Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.285514 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptxmc\" (UniqueName: \"kubernetes.io/projected/9680dd1c-fbb4-4869-b998-f395f4fab06f-kube-api-access-ptxmc\") pod \"collect-profiles-29422515-kbmrx\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.285609 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9680dd1c-fbb4-4869-b998-f395f4fab06f-config-volume\") pod \"collect-profiles-29422515-kbmrx\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.285657 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9680dd1c-fbb4-4869-b998-f395f4fab06f-secret-volume\") pod \"collect-profiles-29422515-kbmrx\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.386311 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9680dd1c-fbb4-4869-b998-f395f4fab06f-secret-volume\") pod \"collect-profiles-29422515-kbmrx\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.386404 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptxmc\" (UniqueName: 
\"kubernetes.io/projected/9680dd1c-fbb4-4869-b998-f395f4fab06f-kube-api-access-ptxmc\") pod \"collect-profiles-29422515-kbmrx\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.386483 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9680dd1c-fbb4-4869-b998-f395f4fab06f-config-volume\") pod \"collect-profiles-29422515-kbmrx\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.388006 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9680dd1c-fbb4-4869-b998-f395f4fab06f-config-volume\") pod \"collect-profiles-29422515-kbmrx\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.397382 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9680dd1c-fbb4-4869-b998-f395f4fab06f-secret-volume\") pod \"collect-profiles-29422515-kbmrx\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.404253 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptxmc\" (UniqueName: \"kubernetes.io/projected/9680dd1c-fbb4-4869-b998-f395f4fab06f-kube-api-access-ptxmc\") pod \"collect-profiles-29422515-kbmrx\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.539823 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:00 crc kubenswrapper[4765]: I1210 07:15:00.769901 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx"] Dec 10 07:15:01 crc kubenswrapper[4765]: I1210 07:15:01.577164 4765 generic.go:334] "Generic (PLEG): container finished" podID="9680dd1c-fbb4-4869-b998-f395f4fab06f" containerID="5bfbb24055fd6246eb60dc3da32841f987f99480396094e3193c4530b8f8d039" exitCode=0 Dec 10 07:15:01 crc kubenswrapper[4765]: I1210 07:15:01.577297 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" event={"ID":"9680dd1c-fbb4-4869-b998-f395f4fab06f","Type":"ContainerDied","Data":"5bfbb24055fd6246eb60dc3da32841f987f99480396094e3193c4530b8f8d039"} Dec 10 07:15:01 crc kubenswrapper[4765]: I1210 07:15:01.578351 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" event={"ID":"9680dd1c-fbb4-4869-b998-f395f4fab06f","Type":"ContainerStarted","Data":"1cbf209074c4108eecfe67193fd4d8eecaf1f2a4c570a69bb8fbc9525c7cc4cd"} Dec 10 07:15:02 crc kubenswrapper[4765]: I1210 07:15:02.872560 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.022261 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptxmc\" (UniqueName: \"kubernetes.io/projected/9680dd1c-fbb4-4869-b998-f395f4fab06f-kube-api-access-ptxmc\") pod \"9680dd1c-fbb4-4869-b998-f395f4fab06f\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.022345 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9680dd1c-fbb4-4869-b998-f395f4fab06f-secret-volume\") pod \"9680dd1c-fbb4-4869-b998-f395f4fab06f\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.022432 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9680dd1c-fbb4-4869-b998-f395f4fab06f-config-volume\") pod \"9680dd1c-fbb4-4869-b998-f395f4fab06f\" (UID: \"9680dd1c-fbb4-4869-b998-f395f4fab06f\") " Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.023840 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9680dd1c-fbb4-4869-b998-f395f4fab06f-config-volume" (OuterVolumeSpecName: "config-volume") pod "9680dd1c-fbb4-4869-b998-f395f4fab06f" (UID: "9680dd1c-fbb4-4869-b998-f395f4fab06f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.027637 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9680dd1c-fbb4-4869-b998-f395f4fab06f-kube-api-access-ptxmc" (OuterVolumeSpecName: "kube-api-access-ptxmc") pod "9680dd1c-fbb4-4869-b998-f395f4fab06f" (UID: "9680dd1c-fbb4-4869-b998-f395f4fab06f"). InnerVolumeSpecName "kube-api-access-ptxmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.027785 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9680dd1c-fbb4-4869-b998-f395f4fab06f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9680dd1c-fbb4-4869-b998-f395f4fab06f" (UID: "9680dd1c-fbb4-4869-b998-f395f4fab06f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.124041 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptxmc\" (UniqueName: \"kubernetes.io/projected/9680dd1c-fbb4-4869-b998-f395f4fab06f-kube-api-access-ptxmc\") on node \"crc\" DevicePath \"\"" Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.124076 4765 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9680dd1c-fbb4-4869-b998-f395f4fab06f-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.124100 4765 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9680dd1c-fbb4-4869-b998-f395f4fab06f-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.595008 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" event={"ID":"9680dd1c-fbb4-4869-b998-f395f4fab06f","Type":"ContainerDied","Data":"1cbf209074c4108eecfe67193fd4d8eecaf1f2a4c570a69bb8fbc9525c7cc4cd"} Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.595334 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cbf209074c4108eecfe67193fd4d8eecaf1f2a4c570a69bb8fbc9525c7cc4cd" Dec 10 07:15:03 crc kubenswrapper[4765]: I1210 07:15:03.595041 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx" Dec 10 07:15:11 crc kubenswrapper[4765]: I1210 07:15:11.589572 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:15:11 crc kubenswrapper[4765]: E1210 07:15:11.590201 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:15:24 crc kubenswrapper[4765]: I1210 07:15:24.588987 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:15:24 crc kubenswrapper[4765]: E1210 07:15:24.589773 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:15:27 crc kubenswrapper[4765]: I1210 07:15:27.738020 4765 scope.go:117] "RemoveContainer" containerID="e5112d7593a89cdc84e6fce91dc43492c0ab4b3e753938520601247a72b74152" Dec 10 07:15:27 crc kubenswrapper[4765]: I1210 07:15:27.788360 4765 scope.go:117] "RemoveContainer" containerID="cbdc667ae6243dbfd3c0bda0c5b97556a05592e7dbc896f75f92df34258a3bda" Dec 10 07:15:27 crc kubenswrapper[4765]: I1210 07:15:27.809918 4765 scope.go:117] "RemoveContainer" containerID="536abbad4e2842f7a03f4571a7f1c4dd328f7409b95bd63648e17108f89b6c6c" Dec 10 07:15:27 crc 
kubenswrapper[4765]: I1210 07:15:27.846467 4765 scope.go:117] "RemoveContainer" containerID="c635279a1f2665d647b895514bf3bb2413c60f81a4b49145d0270052ba4ce5ac" Dec 10 07:15:27 crc kubenswrapper[4765]: I1210 07:15:27.868402 4765 scope.go:117] "RemoveContainer" containerID="bac4c71f55ee2170776df4ad5fb3c4f988f9f3170c3b9b43cdcdd7750110de7c" Dec 10 07:15:27 crc kubenswrapper[4765]: I1210 07:15:27.891105 4765 scope.go:117] "RemoveContainer" containerID="ac3689098ac38d2b4d8268641e0b2632892fba929773559138ff9e1b54c7ea38" Dec 10 07:15:27 crc kubenswrapper[4765]: I1210 07:15:27.928346 4765 scope.go:117] "RemoveContainer" containerID="6afc810f2a9aceb67b106d7016d20552f7d41d67ab27474e4b7250b981417bb4" Dec 10 07:15:27 crc kubenswrapper[4765]: I1210 07:15:27.957773 4765 scope.go:117] "RemoveContainer" containerID="33a35f8c3534b6fc13d601dde4481aaa8bb1b8d1e0037c77c68ad4bee8e3a61a" Dec 10 07:15:28 crc kubenswrapper[4765]: I1210 07:15:28.000431 4765 scope.go:117] "RemoveContainer" containerID="20f0b6e27a7b4ff0b68bfdb9cb61de6e57e03e305bce2b157b1fc9b1c6cf96b1" Dec 10 07:15:28 crc kubenswrapper[4765]: I1210 07:15:28.036598 4765 scope.go:117] "RemoveContainer" containerID="bb23aa896aeaa13a3a3aad41ff5bdd9ccdddda01059437a0f632a5e3836f4ccb" Dec 10 07:15:28 crc kubenswrapper[4765]: I1210 07:15:28.060323 4765 scope.go:117] "RemoveContainer" containerID="985c248c95292c43eae0dc35b6f8e1697298363ff6eac21ef9a3b020a947a8f6" Dec 10 07:15:28 crc kubenswrapper[4765]: I1210 07:15:28.088740 4765 scope.go:117] "RemoveContainer" containerID="eb20cff81c1acd614b62ed6f6349b3d083cca964a8dbeea2d401c130ef2da951" Dec 10 07:15:28 crc kubenswrapper[4765]: I1210 07:15:28.109104 4765 scope.go:117] "RemoveContainer" containerID="832d47af8585309a92afe68f7c5380a57965390e6c2e78285a08914067a956d5" Dec 10 07:15:28 crc kubenswrapper[4765]: I1210 07:15:28.125423 4765 scope.go:117] "RemoveContainer" containerID="263abf314f478f45b775bde64ca3c733619ab1cf63ef4298b6c94c3c370e9f3f" Dec 10 07:15:28 crc kubenswrapper[4765]: I1210 07:15:28.144241 4765 scope.go:117] "RemoveContainer" containerID="ebee5b572293df3bad7525159545677a6dfe7871f82acb881e53bcdf150ed6ff" Dec 10 07:15:35 crc kubenswrapper[4765]: I1210 07:15:35.588795 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:15:35 crc kubenswrapper[4765]: E1210 07:15:35.589460 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:15:46 crc kubenswrapper[4765]: I1210 07:15:46.589448 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:15:46 crc kubenswrapper[4765]: E1210 07:15:46.590382 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:15:59 crc kubenswrapper[4765]: I1210 07:15:59.589009 4765 scope.go:117] 
"RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:15:59 crc kubenswrapper[4765]: E1210 07:15:59.589881 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:16:10 crc kubenswrapper[4765]: I1210 07:16:10.596689 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:16:10 crc kubenswrapper[4765]: E1210 07:16:10.597554 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:16:24 crc kubenswrapper[4765]: I1210 07:16:24.589311 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:16:24 crc kubenswrapper[4765]: E1210 07:16:24.589963 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.366622 4765 scope.go:117] "RemoveContainer" containerID="007770fdd95bd2e9cdc72b7e466ab66a2ca46ec8421fd2477c7760e27e8beae3" Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.393696 4765 scope.go:117] "RemoveContainer" containerID="4fc9345636a9c7222a93a22391487b0745bc3710f4331dd26db99b3758d3c34b" Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.424102 4765 scope.go:117] "RemoveContainer" containerID="20678e247649d49596783de42d6a2c5262eae48ebad74ca06697392a4449445a" Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.447384 4765 scope.go:117] "RemoveContainer" containerID="2fa399e313ba09c6c2f05ded479e44437ef840c932e96bb4f5e4fda5b0f14797" Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.469966 4765 scope.go:117] "RemoveContainer" containerID="d4796b17aa0ab49644757eca2fad5bcfb573b8c40ef4c6b4b78436f38bd76f16" Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.495303 4765 scope.go:117] "RemoveContainer" containerID="79bef78526d2397a18b4954f88042786fd92ad30f41ebac7f4e0b8dc9f125fef" Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.516248 4765 scope.go:117] "RemoveContainer" containerID="a832827d2751a8011390d3c13351b0e61bd3af97fecc37db2f39502eb67b9861" Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.541275 4765 scope.go:117] "RemoveContainer" containerID="e8fbadda4c283736804707e4d6088058b572b8678b484fe3d73e76e090942c5b" Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.564666 4765 scope.go:117] "RemoveContainer" containerID="52eb1492fe3fa232a7cddaeabaf0c0ba85920a2cd5f59a77e514f89786b23fea" Dec 10 
07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.591981 4765 scope.go:117] "RemoveContainer" containerID="d65c3a9331e9da6fed433a21d6cc45087665ff2f8aa551b25b7f1fcf846e9b96"
Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.614766 4765 scope.go:117] "RemoveContainer" containerID="4e88a8102447c5fe4da6574043b71cbe14ab7c7a10c5423eb7f9dded6b829962"
Dec 10 07:16:28 crc kubenswrapper[4765]: I1210 07:16:28.635522 4765 scope.go:117] "RemoveContainer" containerID="232bf85fe1bdb2e2bc1a622edd4c663477d5437f7f353d0c4045fd6875f71b2b"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.727856 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pk9sz"]
Dec 10 07:16:35 crc kubenswrapper[4765]: E1210 07:16:35.728915 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9680dd1c-fbb4-4869-b998-f395f4fab06f" containerName="collect-profiles"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.728931 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="9680dd1c-fbb4-4869-b998-f395f4fab06f" containerName="collect-profiles"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.729119 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="9680dd1c-fbb4-4869-b998-f395f4fab06f" containerName="collect-profiles"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.737343 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.763836 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pk9sz"]
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.830651 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-utilities\") pod \"redhat-marketplace-pk9sz\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") " pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.830797 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lp68\" (UniqueName: \"kubernetes.io/projected/00059246-aaa0-480a-af8c-e5eb59ff20bc-kube-api-access-5lp68\") pod \"redhat-marketplace-pk9sz\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") " pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.830820 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-catalog-content\") pod \"redhat-marketplace-pk9sz\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") " pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.931757 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lp68\" (UniqueName: \"kubernetes.io/projected/00059246-aaa0-480a-af8c-e5eb59ff20bc-kube-api-access-5lp68\") pod \"redhat-marketplace-pk9sz\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") " pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.932167 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-catalog-content\") pod \"redhat-marketplace-pk9sz\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") " pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.932443 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-utilities\") pod \"redhat-marketplace-pk9sz\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") " pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.932744 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-catalog-content\") pod \"redhat-marketplace-pk9sz\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") " pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.932893 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-utilities\") pod \"redhat-marketplace-pk9sz\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") " pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:35 crc kubenswrapper[4765]: I1210 07:16:35.957377 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lp68\" (UniqueName: \"kubernetes.io/projected/00059246-aaa0-480a-af8c-e5eb59ff20bc-kube-api-access-5lp68\") pod \"redhat-marketplace-pk9sz\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") " pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:36 crc kubenswrapper[4765]: I1210 07:16:36.057081 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:36 crc kubenswrapper[4765]: I1210 07:16:36.485055 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pk9sz"]
Dec 10 07:16:37 crc kubenswrapper[4765]: I1210 07:16:37.393713 4765 generic.go:334] "Generic (PLEG): container finished" podID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerID="27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e" exitCode=0
Dec 10 07:16:37 crc kubenswrapper[4765]: I1210 07:16:37.393820 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pk9sz" event={"ID":"00059246-aaa0-480a-af8c-e5eb59ff20bc","Type":"ContainerDied","Data":"27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e"}
Dec 10 07:16:37 crc kubenswrapper[4765]: I1210 07:16:37.394008 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pk9sz" event={"ID":"00059246-aaa0-480a-af8c-e5eb59ff20bc","Type":"ContainerStarted","Data":"eb61c5c11bbd013803a94efe40b0dc75e2f0c2fe3979104778278840797af0dc"}
Dec 10 07:16:37 crc kubenswrapper[4765]: I1210 07:16:37.395440 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 10 07:16:38 crc kubenswrapper[4765]: I1210 07:16:38.404890 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pk9sz" event={"ID":"00059246-aaa0-480a-af8c-e5eb59ff20bc","Type":"ContainerStarted","Data":"4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49"}
Dec 10 07:16:39 crc kubenswrapper[4765]: I1210 07:16:39.415001 4765 generic.go:334] "Generic (PLEG): container finished" podID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerID="4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49" exitCode=0
Dec 10 07:16:39 crc kubenswrapper[4765]: I1210 07:16:39.415077 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pk9sz" event={"ID":"00059246-aaa0-480a-af8c-e5eb59ff20bc","Type":"ContainerDied","Data":"4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49"}
Dec 10 07:16:39 crc kubenswrapper[4765]: I1210 07:16:39.588671 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:16:39 crc kubenswrapper[4765]: E1210 07:16:39.589186 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:16:40 crc kubenswrapper[4765]: I1210 07:16:40.427033 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pk9sz" event={"ID":"00059246-aaa0-480a-af8c-e5eb59ff20bc","Type":"ContainerStarted","Data":"cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74"}
Dec 10 07:16:40 crc kubenswrapper[4765]: I1210 07:16:40.447812 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pk9sz" podStartSLOduration=3.03307523 podStartE2EDuration="5.447786149s" podCreationTimestamp="2025-12-10 07:16:35 +0000 UTC" firstStartedPulling="2025-12-10 07:16:37.395052538 +0000 UTC m=+1717.121717854" lastFinishedPulling="2025-12-10 07:16:39.809763457 +0000 UTC m=+1719.536428773" observedRunningTime="2025-12-10 07:16:40.442944842 +0000 UTC m=+1720.169610168" watchObservedRunningTime="2025-12-10 07:16:40.447786149 +0000 UTC m=+1720.174451465"
Dec 10 07:16:46 crc kubenswrapper[4765]: I1210 07:16:46.058303 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:46 crc kubenswrapper[4765]: I1210 07:16:46.058826 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:46 crc kubenswrapper[4765]: I1210 07:16:46.103001 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:46 crc kubenswrapper[4765]: I1210 07:16:46.660941 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:46 crc kubenswrapper[4765]: I1210 07:16:46.709989 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pk9sz"]
Dec 10 07:16:48 crc kubenswrapper[4765]: I1210 07:16:48.676985 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pk9sz" podUID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerName="registry-server" containerID="cri-o://cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74" gracePeriod=2
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.591418 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.668466 4765 generic.go:334] "Generic (PLEG): container finished" podID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerID="cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74" exitCode=0
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.668518 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pk9sz" event={"ID":"00059246-aaa0-480a-af8c-e5eb59ff20bc","Type":"ContainerDied","Data":"cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74"}
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.668550 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pk9sz" event={"ID":"00059246-aaa0-480a-af8c-e5eb59ff20bc","Type":"ContainerDied","Data":"eb61c5c11bbd013803a94efe40b0dc75e2f0c2fe3979104778278840797af0dc"}
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.668548 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pk9sz"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.668569 4765 scope.go:117] "RemoveContainer" containerID="cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.684867 4765 scope.go:117] "RemoveContainer" containerID="4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.699429 4765 scope.go:117] "RemoveContainer" containerID="27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.723842 4765 scope.go:117] "RemoveContainer" containerID="cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74"
Dec 10 07:16:49 crc kubenswrapper[4765]: E1210 07:16:49.724469 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74\": container with ID starting with cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74 not found: ID does not exist" containerID="cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.724507 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74"} err="failed to get container status \"cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74\": rpc error: code = NotFound desc = could not find container \"cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74\": container with ID starting with cabd670e5f1f422c30cf0b7d26269464db07fe8b6671d0f650b7aaedcda47f74 not found: ID does not exist"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.724533 4765 scope.go:117] "RemoveContainer" containerID="4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49"
Dec 10 07:16:49 crc kubenswrapper[4765]: E1210 07:16:49.724875 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49\": container with ID starting with 4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49 not found: ID does not exist" containerID="4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.724900 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49"} err="failed to get container status \"4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49\": rpc error: code = NotFound desc = could not find container \"4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49\": container with ID starting with 4cda987967cc4591587ec5ff3e985f09563383c004bd84625eac8aa909bebf49 not found: ID does not exist"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.724923 4765 scope.go:117] "RemoveContainer" containerID="27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e"
Dec 10 07:16:49 crc kubenswrapper[4765]: E1210 07:16:49.725216 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e\": container with ID starting with 27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e not found: ID does not exist" containerID="27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.725245 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e"} err="failed to get container status \"27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e\": rpc error: code = NotFound desc = could not find container \"27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e\": container with ID starting with 27e43762fb874660818930daa26cf8ad3e5c3a9866d3d1521d6d6878383a559e not found: ID does not exist"
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.727921 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lp68\" (UniqueName: \"kubernetes.io/projected/00059246-aaa0-480a-af8c-e5eb59ff20bc-kube-api-access-5lp68\") pod \"00059246-aaa0-480a-af8c-e5eb59ff20bc\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") "
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.727956 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-utilities\") pod \"00059246-aaa0-480a-af8c-e5eb59ff20bc\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") "
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.728865 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-utilities" (OuterVolumeSpecName: "utilities") pod "00059246-aaa0-480a-af8c-e5eb59ff20bc" (UID: "00059246-aaa0-480a-af8c-e5eb59ff20bc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.728941 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-catalog-content\") pod \"00059246-aaa0-480a-af8c-e5eb59ff20bc\" (UID: \"00059246-aaa0-480a-af8c-e5eb59ff20bc\") "
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.729264 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.734078 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00059246-aaa0-480a-af8c-e5eb59ff20bc-kube-api-access-5lp68" (OuterVolumeSpecName: "kube-api-access-5lp68") pod "00059246-aaa0-480a-af8c-e5eb59ff20bc" (UID: "00059246-aaa0-480a-af8c-e5eb59ff20bc"). InnerVolumeSpecName "kube-api-access-5lp68". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.751388 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "00059246-aaa0-480a-af8c-e5eb59ff20bc" (UID: "00059246-aaa0-480a-af8c-e5eb59ff20bc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.830454 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lp68\" (UniqueName: \"kubernetes.io/projected/00059246-aaa0-480a-af8c-e5eb59ff20bc-kube-api-access-5lp68\") on node \"crc\" DevicePath \"\""
Dec 10 07:16:49 crc kubenswrapper[4765]: I1210 07:16:49.830508 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00059246-aaa0-480a-af8c-e5eb59ff20bc-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 07:16:50 crc kubenswrapper[4765]: I1210 07:16:50.007267 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pk9sz"]
Dec 10 07:16:50 crc kubenswrapper[4765]: I1210 07:16:50.014364 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pk9sz"]
Dec 10 07:16:50 crc kubenswrapper[4765]: I1210 07:16:50.598851 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00059246-aaa0-480a-af8c-e5eb59ff20bc" path="/var/lib/kubelet/pods/00059246-aaa0-480a-af8c-e5eb59ff20bc/volumes"
Dec 10 07:16:52 crc kubenswrapper[4765]: I1210 07:16:52.588617 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:16:52 crc kubenswrapper[4765]: E1210 07:16:52.588939 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:17:06 crc kubenswrapper[4765]: I1210 07:17:06.589711 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:17:06 crc kubenswrapper[4765]: E1210 07:17:06.591538 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:17:19 crc kubenswrapper[4765]: I1210 07:17:19.589436 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:17:19 crc kubenswrapper[4765]: E1210 07:17:19.590735 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:17:28 crc kubenswrapper[4765]: I1210 07:17:28.801636 4765 scope.go:117] "RemoveContainer" containerID="df242b7c160deac47abd6b9541c5e53d3fd8d19239aa5942bb1b6bc35af53e76"
Dec 10 07:17:28 crc kubenswrapper[4765]: I1210 07:17:28.825031 4765 scope.go:117] "RemoveContainer" containerID="9d9b5a141c2983815efdaebc1eb8fd5678d1eca528232df16cb4e7e0e7ca55f0"
Dec 10 07:17:28 crc kubenswrapper[4765]: I1210 07:17:28.883522 4765 scope.go:117] "RemoveContainer" containerID="c6917a3810c494b02e66b935f5dd67320cea45af7e76831145b9736da74e9429"
Dec 10 07:17:28 crc kubenswrapper[4765]: I1210 07:17:28.918272 4765 scope.go:117] "RemoveContainer" containerID="cb48d7619048e7367222e91b29eb3e701b0b9b6088b3e223019ff2e827511da1"
Dec 10 07:17:30 crc kubenswrapper[4765]: I1210 07:17:30.594004 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:17:30 crc kubenswrapper[4765]: E1210 07:17:30.594272 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:17:42 crc kubenswrapper[4765]: I1210 07:17:42.588780 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:17:42 crc kubenswrapper[4765]: E1210 07:17:42.589596 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:17:57 crc kubenswrapper[4765]: I1210 07:17:57.588789 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:17:57 crc kubenswrapper[4765]: E1210 07:17:57.589497 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:18:11 crc kubenswrapper[4765]: I1210 07:18:11.589374 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:18:11 crc kubenswrapper[4765]: E1210 07:18:11.590203 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:18:26 crc kubenswrapper[4765]: I1210 07:18:26.589845 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:18:26 crc kubenswrapper[4765]: E1210 07:18:26.590637 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:18:29 crc kubenswrapper[4765]: I1210 07:18:29.046964 4765 scope.go:117] "RemoveContainer" containerID="b256bba4f0b940452d87c1efa2568615b300a7d4a10c4b13e4fec042905b6c14"
Dec 10 07:18:37 crc kubenswrapper[4765]: I1210 07:18:37.589715 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:18:37 crc kubenswrapper[4765]: E1210 07:18:37.590224 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:18:51 crc kubenswrapper[4765]: I1210 07:18:51.588912 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:18:51 crc kubenswrapper[4765]: E1210 07:18:51.589511 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:19:03 crc kubenswrapper[4765]: I1210 07:19:03.589632 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:19:03 crc kubenswrapper[4765]: E1210 07:19:03.590329 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:19:16 crc kubenswrapper[4765]: I1210 07:19:16.588979 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083"
Dec 10 07:19:16 crc kubenswrapper[4765]: I1210 07:19:16.797485 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"a74333ecfa6249aca625258a4eaba829d0f074ed18a38454142781e00eca7b94"}
Dec 10 07:19:29 crc kubenswrapper[4765]: I1210 07:19:29.128847 4765 scope.go:117] "RemoveContainer" containerID="818398cac57dcefcb64574fce10496d462c9aa4895bf95a9491256d4561e080a"
Dec 10 07:19:29 crc kubenswrapper[4765]: I1210 07:19:29.148810 4765 scope.go:117] "RemoveContainer" containerID="a85ff52f8967b342b29d29d59b703382c1f6ef448d73456ffa0f05292be1900c"
Dec 10 07:19:29 crc kubenswrapper[4765]: I1210 07:19:29.177476 4765 scope.go:117] "RemoveContainer" containerID="ca5af5ea3d591a6d2f15cf44977e743548d34c8e91df4bdf5bad6f8c7094295d"
Dec 10 07:19:29 crc kubenswrapper[4765]: I1210 07:19:29.205048 4765 scope.go:117] "RemoveContainer" containerID="32b6a76ccd1daa4a50f1524fe483981c2e3cd65443b53c4cd6b98ae3db49899e"
Dec 10 07:19:29 crc kubenswrapper[4765]: I1210 07:19:29.231176 4765 scope.go:117] "RemoveContainer" containerID="fa9b039bec586c60153cc47099fde851491eb2e2c1af97a51854e0c46850f414"
Dec 10 07:19:29 crc kubenswrapper[4765]: I1210 07:19:29.265185 4765 scope.go:117] "RemoveContainer" containerID="7a879c78d6d7c70bb65a60b88c1cb601644349aa93bf05f1fc08e3e60d091cea"
Dec 10 07:21:34 crc kubenswrapper[4765]: I1210 07:21:34.050219 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 07:21:34 crc kubenswrapper[4765]: I1210 07:21:34.050812 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 07:22:04 crc kubenswrapper[4765]: I1210 07:22:04.050397 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 07:22:04 crc kubenswrapper[4765]: I1210 07:22:04.052236 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 07:22:04 crc kubenswrapper[4765]: I1210 07:22:04.831804 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zz797"]
Dec 10 07:22:04 crc kubenswrapper[4765]: E1210 07:22:04.832705 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerName="extract-content"
Dec 10 07:22:04 crc kubenswrapper[4765]: I1210 07:22:04.832721 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerName="extract-content"
Dec 10 07:22:04 crc kubenswrapper[4765]: E1210 07:22:04.832759 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerName="extract-utilities"
Dec 10 07:22:04 crc kubenswrapper[4765]: I1210 07:22:04.832766 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerName="extract-utilities"
Dec 10 07:22:04 crc kubenswrapper[4765]: E1210 07:22:04.832775 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerName="registry-server"
Dec 10 07:22:04 crc kubenswrapper[4765]: I1210 07:22:04.832784 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerName="registry-server"
Dec 10 07:22:04 crc kubenswrapper[4765]: I1210 07:22:04.832993 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="00059246-aaa0-480a-af8c-e5eb59ff20bc" containerName="registry-server"
Dec 10 07:22:04 crc kubenswrapper[4765]: I1210 07:22:04.834370 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:04 crc kubenswrapper[4765]: I1210 07:22:04.841049 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zz797"]
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.011566 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-utilities\") pod \"redhat-operators-zz797\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") " pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.011638 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-catalog-content\") pod \"redhat-operators-zz797\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") " pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.013125 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czln4\" (UniqueName: \"kubernetes.io/projected/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-kube-api-access-czln4\") pod \"redhat-operators-zz797\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") " pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.114069 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-utilities\") pod \"redhat-operators-zz797\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") " pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.114140 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-catalog-content\") pod \"redhat-operators-zz797\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") " pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.114159 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czln4\" (UniqueName: \"kubernetes.io/projected/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-kube-api-access-czln4\") pod \"redhat-operators-zz797\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") " pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.114823 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-utilities\") pod \"redhat-operators-zz797\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") " pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.114860 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-catalog-content\") pod \"redhat-operators-zz797\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") " pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.137070 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czln4\" (UniqueName: \"kubernetes.io/projected/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-kube-api-access-czln4\") pod \"redhat-operators-zz797\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") " pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.171759 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.628353 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zz797"]
Dec 10 07:22:05 crc kubenswrapper[4765]: I1210 07:22:05.780852 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zz797" event={"ID":"3d171dd5-7a70-48de-bfc9-dee5535fc6ca","Type":"ContainerStarted","Data":"12655c04a755de323b7a8d5a667b292e1203480747bab1d9a6bb9e0247599ba2"}
Dec 10 07:22:06 crc kubenswrapper[4765]: I1210 07:22:06.790299 4765 generic.go:334] "Generic (PLEG): container finished" podID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" containerID="8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0" exitCode=0
Dec 10 07:22:06 crc kubenswrapper[4765]: I1210 07:22:06.790430 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zz797" event={"ID":"3d171dd5-7a70-48de-bfc9-dee5535fc6ca","Type":"ContainerDied","Data":"8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0"}
Dec 10 07:22:06 crc kubenswrapper[4765]: I1210 07:22:06.792216 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 10 07:22:07 crc kubenswrapper[4765]: I1210 07:22:07.798623 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zz797" event={"ID":"3d171dd5-7a70-48de-bfc9-dee5535fc6ca","Type":"ContainerStarted","Data":"e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de"}
Dec 10 07:22:08 crc kubenswrapper[4765]: I1210 07:22:08.809384 4765 generic.go:334] "Generic (PLEG): container finished" podID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" containerID="e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de" exitCode=0
Dec 10 07:22:08 crc kubenswrapper[4765]: I1210 07:22:08.809462 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zz797" event={"ID":"3d171dd5-7a70-48de-bfc9-dee5535fc6ca","Type":"ContainerDied","Data":"e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de"}
Dec 10 07:22:09 crc kubenswrapper[4765]: I1210 07:22:09.818882 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zz797" event={"ID":"3d171dd5-7a70-48de-bfc9-dee5535fc6ca","Type":"ContainerStarted","Data":"10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2"}
Dec 10 07:22:09 crc kubenswrapper[4765]: I1210 07:22:09.841267 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zz797" podStartSLOduration=3.218740692 podStartE2EDuration="5.84123738s" podCreationTimestamp="2025-12-10 07:22:04 +0000 UTC" firstStartedPulling="2025-12-10 07:22:06.791901504 +0000 UTC m=+2046.518566810" lastFinishedPulling="2025-12-10 07:22:09.414398192 +0000 UTC m=+2049.141063498" observedRunningTime="2025-12-10 07:22:09.835595179 +0000 UTC m=+2049.562260515" watchObservedRunningTime="2025-12-10 07:22:09.84123738 +0000 UTC m=+2049.567902696"
Dec 10 07:22:15 crc kubenswrapper[4765]: I1210 07:22:15.173075 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:15 crc kubenswrapper[4765]: I1210 07:22:15.173552 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:15 crc kubenswrapper[4765]: I1210 07:22:15.221560 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:15 crc kubenswrapper[4765]: I1210 07:22:15.911696 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:15 crc kubenswrapper[4765]: I1210 07:22:15.950229 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zz797"]
Dec 10 07:22:17 crc kubenswrapper[4765]: I1210 07:22:17.888007 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zz797" podUID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" containerName="registry-server" containerID="cri-o://10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2" gracePeriod=2
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.537235 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.702118 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-catalog-content\") pod \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") "
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.702198 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-utilities\") pod \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") "
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.702309 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czln4\" (UniqueName: \"kubernetes.io/projected/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-kube-api-access-czln4\") pod \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\" (UID: \"3d171dd5-7a70-48de-bfc9-dee5535fc6ca\") "
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.703375 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-utilities" (OuterVolumeSpecName: "utilities") pod "3d171dd5-7a70-48de-bfc9-dee5535fc6ca" (UID: "3d171dd5-7a70-48de-bfc9-dee5535fc6ca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.708618 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-kube-api-access-czln4" (OuterVolumeSpecName: "kube-api-access-czln4") pod "3d171dd5-7a70-48de-bfc9-dee5535fc6ca" (UID: "3d171dd5-7a70-48de-bfc9-dee5535fc6ca"). InnerVolumeSpecName "kube-api-access-czln4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.804644 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czln4\" (UniqueName: \"kubernetes.io/projected/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-kube-api-access-czln4\") on node \"crc\" DevicePath \"\""
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.804692 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.899541 4765 generic.go:334] "Generic (PLEG): container finished" podID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" containerID="10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2" exitCode=0
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.899612 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zz797" event={"ID":"3d171dd5-7a70-48de-bfc9-dee5535fc6ca","Type":"ContainerDied","Data":"10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2"}
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.899730 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zz797" event={"ID":"3d171dd5-7a70-48de-bfc9-dee5535fc6ca","Type":"ContainerDied","Data":"12655c04a755de323b7a8d5a667b292e1203480747bab1d9a6bb9e0247599ba2"}
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.899757 4765 scope.go:117] "RemoveContainer" containerID="10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2"
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.899661 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zz797"
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.920782 4765 scope.go:117] "RemoveContainer" containerID="e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de"
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.939184 4765 scope.go:117] "RemoveContainer" containerID="8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0"
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.960591 4765 scope.go:117] "RemoveContainer" containerID="10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2"
Dec 10 07:22:18 crc kubenswrapper[4765]: E1210 07:22:18.961156 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2\": container with ID starting with 10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2 not found: ID does not exist" containerID="10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2"
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.961202 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2"} err="failed to get container status \"10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2\": rpc error: code = NotFound desc = could not find container \"10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2\": container with ID starting with 10d2d53f94d1e17b5082568505661d4e77125e5a007860823398a6258a2c19d2 not found: ID does not exist"
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.961232 4765 scope.go:117] "RemoveContainer" containerID="e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de"
Dec 10 07:22:18 crc kubenswrapper[4765]: E1210 07:22:18.961584 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de\": container with ID starting with e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de not found: ID does not exist" containerID="e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de"
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.961606 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de"} err="failed to get container status \"e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de\": rpc error: code = NotFound desc = could not find container \"e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de\": container with ID starting with e35a067d69ca8bc14379c2ac8772809fe7ce36bde52163f3395dad1e46fd18de not found: ID does not exist"
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.961622 4765 scope.go:117] "RemoveContainer" containerID="8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0"
Dec 10 07:22:18 crc kubenswrapper[4765]: E1210 07:22:18.961988 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0\": container with ID starting with 8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0 not found: ID does not exist" containerID="8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0"
Dec 10 07:22:18 crc kubenswrapper[4765]: I1210 07:22:18.962035 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0"} err="failed to get container status \"8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0\": rpc error: code = NotFound desc = could not find container \"8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0\": container with ID starting with 8633297b00c35faadea3b3d880719d14e12dc7c03c5638f3b4d3c41bc23c51c0 not found: ID does not exist" Dec 10 07:22:19 crc kubenswrapper[4765]: I1210 07:22:19.172691 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d171dd5-7a70-48de-bfc9-dee5535fc6ca" (UID: "3d171dd5-7a70-48de-bfc9-dee5535fc6ca"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:22:19 crc kubenswrapper[4765]: I1210 07:22:19.211167 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d171dd5-7a70-48de-bfc9-dee5535fc6ca-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:22:19 crc kubenswrapper[4765]: I1210 07:22:19.241067 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zz797"] Dec 10 07:22:19 crc kubenswrapper[4765]: I1210 07:22:19.253054 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zz797"] Dec 10 07:22:20 crc kubenswrapper[4765]: I1210 07:22:20.598486 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" path="/var/lib/kubelet/pods/3d171dd5-7a70-48de-bfc9-dee5535fc6ca/volumes" Dec 10 07:22:34 crc kubenswrapper[4765]: I1210 07:22:34.049767 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:22:34 crc kubenswrapper[4765]: I1210 07:22:34.050305 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:22:34 crc kubenswrapper[4765]: I1210 07:22:34.050356 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 07:22:34 crc kubenswrapper[4765]: I1210 07:22:34.050950 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a74333ecfa6249aca625258a4eaba829d0f074ed18a38454142781e00eca7b94"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 07:22:34 crc kubenswrapper[4765]: I1210 07:22:34.051010 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" 
containerName="machine-config-daemon" containerID="cri-o://a74333ecfa6249aca625258a4eaba829d0f074ed18a38454142781e00eca7b94" gracePeriod=600 Dec 10 07:22:35 crc kubenswrapper[4765]: I1210 07:22:35.016289 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="a74333ecfa6249aca625258a4eaba829d0f074ed18a38454142781e00eca7b94" exitCode=0 Dec 10 07:22:35 crc kubenswrapper[4765]: I1210 07:22:35.016353 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"a74333ecfa6249aca625258a4eaba829d0f074ed18a38454142781e00eca7b94"} Dec 10 07:22:35 crc kubenswrapper[4765]: I1210 07:22:35.016899 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f"} Dec 10 07:22:35 crc kubenswrapper[4765]: I1210 07:22:35.016953 4765 scope.go:117] "RemoveContainer" containerID="35f338e29cdd6c3805d6d77f17aa7e466fc654565b237ace116d984a2541b083" Dec 10 07:24:34 crc kubenswrapper[4765]: I1210 07:24:34.049363 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:24:34 crc kubenswrapper[4765]: I1210 07:24:34.049948 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:24:43 crc kubenswrapper[4765]: I1210 07:24:43.829565 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bdvtq"] Dec 10 07:24:43 crc kubenswrapper[4765]: E1210 07:24:43.832473 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" containerName="extract-utilities" Dec 10 07:24:43 crc kubenswrapper[4765]: I1210 07:24:43.832502 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" containerName="extract-utilities" Dec 10 07:24:43 crc kubenswrapper[4765]: E1210 07:24:43.832524 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" containerName="extract-content" Dec 10 07:24:43 crc kubenswrapper[4765]: I1210 07:24:43.832532 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" containerName="extract-content" Dec 10 07:24:43 crc kubenswrapper[4765]: E1210 07:24:43.832582 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" containerName="registry-server" Dec 10 07:24:43 crc kubenswrapper[4765]: I1210 07:24:43.832595 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" containerName="registry-server" Dec 10 07:24:43 crc kubenswrapper[4765]: I1210 07:24:43.832797 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d171dd5-7a70-48de-bfc9-dee5535fc6ca" 
containerName="registry-server" Dec 10 07:24:43 crc kubenswrapper[4765]: I1210 07:24:43.833996 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:43 crc kubenswrapper[4765]: I1210 07:24:43.844551 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bdvtq"] Dec 10 07:24:43 crc kubenswrapper[4765]: I1210 07:24:43.941802 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx5h4\" (UniqueName: \"kubernetes.io/projected/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-kube-api-access-bx5h4\") pod \"community-operators-bdvtq\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") " pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:43 crc kubenswrapper[4765]: I1210 07:24:43.941876 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-catalog-content\") pod \"community-operators-bdvtq\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") " pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:43 crc kubenswrapper[4765]: I1210 07:24:43.941939 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-utilities\") pod \"community-operators-bdvtq\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") " pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.043485 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx5h4\" (UniqueName: \"kubernetes.io/projected/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-kube-api-access-bx5h4\") pod \"community-operators-bdvtq\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") " pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.043556 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-catalog-content\") pod \"community-operators-bdvtq\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") " pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.043614 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-utilities\") pod \"community-operators-bdvtq\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") " pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.044160 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-utilities\") pod \"community-operators-bdvtq\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") " pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.044265 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-catalog-content\") pod \"community-operators-bdvtq\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") " 
pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.067580 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bx5h4\" (UniqueName: \"kubernetes.io/projected/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-kube-api-access-bx5h4\") pod \"community-operators-bdvtq\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") " pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.155548 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.636658 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bdvtq"] Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.940134 4765 generic.go:334] "Generic (PLEG): container finished" podID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerID="b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988" exitCode=0 Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.940209 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdvtq" event={"ID":"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01","Type":"ContainerDied","Data":"b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988"} Dec 10 07:24:44 crc kubenswrapper[4765]: I1210 07:24:44.940356 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdvtq" event={"ID":"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01","Type":"ContainerStarted","Data":"8a6c2f3deb1bde6372225ed3073d9e3843e61b7aea9d9d78161c0c8e8a3ada7c"} Dec 10 07:24:45 crc kubenswrapper[4765]: I1210 07:24:45.951067 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdvtq" event={"ID":"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01","Type":"ContainerStarted","Data":"9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f"} Dec 10 07:24:46 crc kubenswrapper[4765]: I1210 07:24:46.810683 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qnwfr"] Dec 10 07:24:46 crc kubenswrapper[4765]: I1210 07:24:46.812305 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:46 crc kubenswrapper[4765]: I1210 07:24:46.825609 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qnwfr"] Dec 10 07:24:46 crc kubenswrapper[4765]: I1210 07:24:46.959788 4765 generic.go:334] "Generic (PLEG): container finished" podID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerID="9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f" exitCode=0 Dec 10 07:24:46 crc kubenswrapper[4765]: I1210 07:24:46.959880 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdvtq" event={"ID":"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01","Type":"ContainerDied","Data":"9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f"} Dec 10 07:24:46 crc kubenswrapper[4765]: I1210 07:24:46.990389 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-utilities\") pod \"certified-operators-qnwfr\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:46 crc kubenswrapper[4765]: I1210 07:24:46.990491 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrksq\" (UniqueName: \"kubernetes.io/projected/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-kube-api-access-jrksq\") pod \"certified-operators-qnwfr\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:46 crc kubenswrapper[4765]: I1210 07:24:46.990551 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-catalog-content\") pod \"certified-operators-qnwfr\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.092496 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-utilities\") pod \"certified-operators-qnwfr\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.093511 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-utilities\") pod \"certified-operators-qnwfr\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.093757 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrksq\" (UniqueName: \"kubernetes.io/projected/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-kube-api-access-jrksq\") pod \"certified-operators-qnwfr\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.093851 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-catalog-content\") pod \"certified-operators-qnwfr\" (UID: 
\"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.094155 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-catalog-content\") pod \"certified-operators-qnwfr\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.114401 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrksq\" (UniqueName: \"kubernetes.io/projected/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-kube-api-access-jrksq\") pod \"certified-operators-qnwfr\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.132158 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.585397 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qnwfr"] Dec 10 07:24:47 crc kubenswrapper[4765]: W1210 07:24:47.590215 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcea562f8_02be_4f64_b0b4_d8cfcb05d5af.slice/crio-1cca5b3ef389c4f1c073ffd55ec5618f736815e1beceafb27c14d8cd4d18c7cd WatchSource:0}: Error finding container 1cca5b3ef389c4f1c073ffd55ec5618f736815e1beceafb27c14d8cd4d18c7cd: Status 404 returned error can't find the container with id 1cca5b3ef389c4f1c073ffd55ec5618f736815e1beceafb27c14d8cd4d18c7cd Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.970858 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdvtq" event={"ID":"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01","Type":"ContainerStarted","Data":"9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7"} Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.972814 4765 generic.go:334] "Generic (PLEG): container finished" podID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerID="afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2" exitCode=0 Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.972860 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnwfr" event={"ID":"cea562f8-02be-4f64-b0b4-d8cfcb05d5af","Type":"ContainerDied","Data":"afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2"} Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.972887 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnwfr" event={"ID":"cea562f8-02be-4f64-b0b4-d8cfcb05d5af","Type":"ContainerStarted","Data":"1cca5b3ef389c4f1c073ffd55ec5618f736815e1beceafb27c14d8cd4d18c7cd"} Dec 10 07:24:47 crc kubenswrapper[4765]: I1210 07:24:47.992480 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bdvtq" podStartSLOduration=2.498066672 podStartE2EDuration="4.992436709s" podCreationTimestamp="2025-12-10 07:24:43 +0000 UTC" firstStartedPulling="2025-12-10 07:24:44.94219822 +0000 UTC m=+2204.668863536" lastFinishedPulling="2025-12-10 07:24:47.436568257 +0000 UTC m=+2207.163233573" observedRunningTime="2025-12-10 07:24:47.990868844 +0000 UTC 
Dec 10 07:24:48 crc kubenswrapper[4765]: I1210 07:24:48.982913 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnwfr" event={"ID":"cea562f8-02be-4f64-b0b4-d8cfcb05d5af","Type":"ContainerStarted","Data":"6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a"}
Dec 10 07:24:49 crc kubenswrapper[4765]: I1210 07:24:49.993035 4765 generic.go:334] "Generic (PLEG): container finished" podID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerID="6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a" exitCode=0
Dec 10 07:24:49 crc kubenswrapper[4765]: I1210 07:24:49.993105 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnwfr" event={"ID":"cea562f8-02be-4f64-b0b4-d8cfcb05d5af","Type":"ContainerDied","Data":"6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a"}
Dec 10 07:24:51 crc kubenswrapper[4765]: I1210 07:24:51.003254 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnwfr" event={"ID":"cea562f8-02be-4f64-b0b4-d8cfcb05d5af","Type":"ContainerStarted","Data":"5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c"}
Dec 10 07:24:51 crc kubenswrapper[4765]: I1210 07:24:51.019876 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qnwfr" podStartSLOduration=2.567744824 podStartE2EDuration="5.019853587s" podCreationTimestamp="2025-12-10 07:24:46 +0000 UTC" firstStartedPulling="2025-12-10 07:24:47.975255049 +0000 UTC m=+2207.701920365" lastFinishedPulling="2025-12-10 07:24:50.427363812 +0000 UTC m=+2210.154029128" observedRunningTime="2025-12-10 07:24:51.019211279 +0000 UTC m=+2210.745876595" watchObservedRunningTime="2025-12-10 07:24:51.019853587 +0000 UTC m=+2210.746518903"
Dec 10 07:24:54 crc kubenswrapper[4765]: I1210 07:24:54.156530 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bdvtq"
Dec 10 07:24:54 crc kubenswrapper[4765]: I1210 07:24:54.156870 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bdvtq"
Dec 10 07:24:54 crc kubenswrapper[4765]: I1210 07:24:54.204306 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bdvtq"
Dec 10 07:24:55 crc kubenswrapper[4765]: I1210 07:24:55.076245 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bdvtq"
Dec 10 07:24:55 crc kubenswrapper[4765]: I1210 07:24:55.402031 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bdvtq"]
Dec 10 07:24:57 crc kubenswrapper[4765]: I1210 07:24:57.049028 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bdvtq" podUID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerName="registry-server" containerID="cri-o://9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7" gracePeriod=2
Dec 10 07:24:57 crc kubenswrapper[4765]: I1210 07:24:57.133337 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qnwfr"
Dec 10 07:24:57 crc kubenswrapper[4765]: I1210 07:24:57.133392 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qnwfr"
Dec 10 07:24:57 crc kubenswrapper[4765]: I1210 07:24:57.177960 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qnwfr"
Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.100967 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qnwfr"
Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.536047 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdvtq"
Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.632937 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-utilities\") pod \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") "
Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.633058 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bx5h4\" (UniqueName: \"kubernetes.io/projected/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-kube-api-access-bx5h4\") pod \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") "
Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.634101 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-utilities" (OuterVolumeSpecName: "utilities") pod "e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" (UID: "e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.634315 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-catalog-content\") pod \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\" (UID: \"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01\") "
Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.634687 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.644411 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-kube-api-access-bx5h4" (OuterVolumeSpecName: "kube-api-access-bx5h4") pod "e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" (UID: "e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01"). InnerVolumeSpecName "kube-api-access-bx5h4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.688338 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" (UID: "e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.736557 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bx5h4\" (UniqueName: \"kubernetes.io/projected/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-kube-api-access-bx5h4\") on node \"crc\" DevicePath \"\"" Dec 10 07:24:58 crc kubenswrapper[4765]: I1210 07:24:58.736848 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.002547 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qnwfr"] Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.067269 4765 generic.go:334] "Generic (PLEG): container finished" podID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerID="9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7" exitCode=0 Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.067359 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdvtq" event={"ID":"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01","Type":"ContainerDied","Data":"9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7"} Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.067374 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdvtq" Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.067397 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdvtq" event={"ID":"e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01","Type":"ContainerDied","Data":"8a6c2f3deb1bde6372225ed3073d9e3843e61b7aea9d9d78161c0c8e8a3ada7c"} Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.067415 4765 scope.go:117] "RemoveContainer" containerID="9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7" Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.090747 4765 scope.go:117] "RemoveContainer" containerID="9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f" Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.111643 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bdvtq"] Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.119689 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bdvtq"] Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.128257 4765 scope.go:117] "RemoveContainer" containerID="b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988" Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.147033 4765 scope.go:117] "RemoveContainer" containerID="9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7" Dec 10 07:24:59 crc kubenswrapper[4765]: E1210 07:24:59.147630 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7\": container with ID starting with 9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7 not found: ID does not exist" containerID="9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7" Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.147684 4765 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7"} err="failed to get container status \"9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7\": rpc error: code = NotFound desc = could not find container \"9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7\": container with ID starting with 9afaaa1b2f346b3e3b607fce63717d16a8ae3d106196229c702bfb6b46371af7 not found: ID does not exist" Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.147720 4765 scope.go:117] "RemoveContainer" containerID="9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f" Dec 10 07:24:59 crc kubenswrapper[4765]: E1210 07:24:59.148016 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f\": container with ID starting with 9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f not found: ID does not exist" containerID="9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f" Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.148044 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f"} err="failed to get container status \"9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f\": rpc error: code = NotFound desc = could not find container \"9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f\": container with ID starting with 9b16474de53af8574365b2bc87c7ec22fb7c67a67651b87135a9d3e2edcd5a6f not found: ID does not exist" Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.148061 4765 scope.go:117] "RemoveContainer" containerID="b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988" Dec 10 07:24:59 crc kubenswrapper[4765]: E1210 07:24:59.148357 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988\": container with ID starting with b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988 not found: ID does not exist" containerID="b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988" Dec 10 07:24:59 crc kubenswrapper[4765]: I1210 07:24:59.148380 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988"} err="failed to get container status \"b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988\": rpc error: code = NotFound desc = could not find container \"b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988\": container with ID starting with b1dabc85133250e0e0a2fcb6fb2007fa6131638110337e42736c9b83d78e8988 not found: ID does not exist" Dec 10 07:25:00 crc kubenswrapper[4765]: I1210 07:25:00.075376 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qnwfr" podUID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerName="registry-server" containerID="cri-o://5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c" gracePeriod=2 Dec 10 07:25:00 crc kubenswrapper[4765]: I1210 07:25:00.604688 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" 
path="/var/lib/kubelet/pods/e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01/volumes" Dec 10 07:25:00 crc kubenswrapper[4765]: I1210 07:25:00.932351 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.066805 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrksq\" (UniqueName: \"kubernetes.io/projected/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-kube-api-access-jrksq\") pod \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.066887 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-utilities\") pod \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.066930 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-catalog-content\") pod \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\" (UID: \"cea562f8-02be-4f64-b0b4-d8cfcb05d5af\") " Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.067810 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-utilities" (OuterVolumeSpecName: "utilities") pod "cea562f8-02be-4f64-b0b4-d8cfcb05d5af" (UID: "cea562f8-02be-4f64-b0b4-d8cfcb05d5af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.071788 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-kube-api-access-jrksq" (OuterVolumeSpecName: "kube-api-access-jrksq") pod "cea562f8-02be-4f64-b0b4-d8cfcb05d5af" (UID: "cea562f8-02be-4f64-b0b4-d8cfcb05d5af"). InnerVolumeSpecName "kube-api-access-jrksq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.086825 4765 generic.go:334] "Generic (PLEG): container finished" podID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerID="5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c" exitCode=0 Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.086882 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnwfr" event={"ID":"cea562f8-02be-4f64-b0b4-d8cfcb05d5af","Type":"ContainerDied","Data":"5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c"} Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.086921 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnwfr" event={"ID":"cea562f8-02be-4f64-b0b4-d8cfcb05d5af","Type":"ContainerDied","Data":"1cca5b3ef389c4f1c073ffd55ec5618f736815e1beceafb27c14d8cd4d18c7cd"} Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.086942 4765 scope.go:117] "RemoveContainer" containerID="5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.086965 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qnwfr" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.108649 4765 scope.go:117] "RemoveContainer" containerID="6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.126743 4765 scope.go:117] "RemoveContainer" containerID="afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.131439 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cea562f8-02be-4f64-b0b4-d8cfcb05d5af" (UID: "cea562f8-02be-4f64-b0b4-d8cfcb05d5af"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.164639 4765 scope.go:117] "RemoveContainer" containerID="5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c" Dec 10 07:25:01 crc kubenswrapper[4765]: E1210 07:25:01.165243 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c\": container with ID starting with 5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c not found: ID does not exist" containerID="5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.165284 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c"} err="failed to get container status \"5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c\": rpc error: code = NotFound desc = could not find container \"5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c\": container with ID starting with 5f83d1d9b6003c5cc8d5df1b337ee31535a382d42e7bd39d607ac1a3c3fcdd3c not found: ID does not exist" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.165308 4765 scope.go:117] "RemoveContainer" containerID="6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a" Dec 10 07:25:01 crc kubenswrapper[4765]: E1210 07:25:01.165769 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a\": container with ID starting with 6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a not found: ID does not exist" containerID="6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.165798 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a"} err="failed to get container status \"6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a\": rpc error: code = NotFound desc = could not find container \"6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a\": container with ID starting with 6c1d962f784180357dc1b00d776b6132e817a17ff34803f58cfff6b61074da1a not found: ID does not exist" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.165817 4765 scope.go:117] "RemoveContainer" containerID="afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2" Dec 10 07:25:01 crc 
kubenswrapper[4765]: E1210 07:25:01.166196 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2\": container with ID starting with afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2 not found: ID does not exist" containerID="afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.166216 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2"} err="failed to get container status \"afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2\": rpc error: code = NotFound desc = could not find container \"afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2\": container with ID starting with afd50889a097b13db41f24eb614130a13e377e479d4fa3cb62ae6f8b362ad4b2 not found: ID does not exist" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.168254 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrksq\" (UniqueName: \"kubernetes.io/projected/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-kube-api-access-jrksq\") on node \"crc\" DevicePath \"\"" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.168302 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.168318 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea562f8-02be-4f64-b0b4-d8cfcb05d5af-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.419642 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qnwfr"] Dec 10 07:25:01 crc kubenswrapper[4765]: I1210 07:25:01.425386 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qnwfr"] Dec 10 07:25:01 crc kubenswrapper[4765]: E1210 07:25:01.548834 4765 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcea562f8_02be_4f64_b0b4_d8cfcb05d5af.slice/crio-1cca5b3ef389c4f1c073ffd55ec5618f736815e1beceafb27c14d8cd4d18c7cd\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcea562f8_02be_4f64_b0b4_d8cfcb05d5af.slice\": RecentStats: unable to find data in memory cache]" Dec 10 07:25:02 crc kubenswrapper[4765]: I1210 07:25:02.599428 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" path="/var/lib/kubelet/pods/cea562f8-02be-4f64-b0b4-d8cfcb05d5af/volumes" Dec 10 07:25:04 crc kubenswrapper[4765]: I1210 07:25:04.049799 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:25:04 crc kubenswrapper[4765]: I1210 07:25:04.049897 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" 
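
The liveness failures above are plain HTTP GETs against 127.0.0.1:8798/health that never reach an HTTP status at all: the TCP connect is refused, which the prober reports verbatim. A probe of this shape is easy to reproduce outside the kubelet; the one-second timeout below is an assumed value, and the 200-399 success window mirrors how HTTP probes are conventionally judged.

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probe performs one HTTP liveness check of the kind the log shows:
    // GET the endpoint, fail on transport errors (e.g. connection refused)
    // or a response status outside 200-399.
    func probe(url string) error {
        client := &http.Client{Timeout: 1 * time.Second} // timeout is an assumed value
        resp, err := client.Get(url)
        if err != nil {
            return err // matches the "dial tcp ... connect: connection refused" output above
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unhealthy status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        fmt.Println(probe("http://127.0.0.1:8798/health"))
    }
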
podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:25:34 crc kubenswrapper[4765]: I1210 07:25:34.050197 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:25:34 crc kubenswrapper[4765]: I1210 07:25:34.050724 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:25:34 crc kubenswrapper[4765]: I1210 07:25:34.050780 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 07:25:34 crc kubenswrapper[4765]: I1210 07:25:34.051560 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 07:25:34 crc kubenswrapper[4765]: I1210 07:25:34.051627 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" gracePeriod=600 Dec 10 07:25:34 crc kubenswrapper[4765]: E1210 07:25:34.182747 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:25:34 crc kubenswrapper[4765]: I1210 07:25:34.372818 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" exitCode=0 Dec 10 07:25:34 crc kubenswrapper[4765]: I1210 07:25:34.372872 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f"} Dec 10 07:25:34 crc kubenswrapper[4765]: I1210 07:25:34.372912 4765 scope.go:117] "RemoveContainer" containerID="a74333ecfa6249aca625258a4eaba829d0f074ed18a38454142781e00eca7b94" Dec 10 07:25:34 crc kubenswrapper[4765]: I1210 07:25:34.373310 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:25:34 crc kubenswrapper[4765]: E1210 07:25:34.373504 4765 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:25:49 crc kubenswrapper[4765]: I1210 07:25:49.589315 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:25:49 crc kubenswrapper[4765]: E1210 07:25:49.590038 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:26:02 crc kubenswrapper[4765]: I1210 07:26:02.588684 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:26:02 crc kubenswrapper[4765]: E1210 07:26:02.589450 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:26:17 crc kubenswrapper[4765]: I1210 07:26:17.588617 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:26:17 crc kubenswrapper[4765]: E1210 07:26:17.589408 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:26:32 crc kubenswrapper[4765]: I1210 07:26:32.589258 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:26:32 crc kubenswrapper[4765]: E1210 07:26:32.589999 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:26:46 crc kubenswrapper[4765]: I1210 07:26:46.589893 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:26:46 crc kubenswrapper[4765]: E1210 07:26:46.590817 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Dec 10 07:27:01 crc kubenswrapper[4765]: I1210 07:27:01.589589 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f"
Dec 10 07:27:01 crc kubenswrapper[4765]: E1210 07:27:01.590558 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.771067 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b5cdp"]
Dec 10 07:27:09 crc kubenswrapper[4765]: E1210 07:27:09.772321 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerName="registry-server"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.772344 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerName="registry-server"
Dec 10 07:27:09 crc kubenswrapper[4765]: E1210 07:27:09.772392 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerName="extract-utilities"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.772407 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerName="extract-utilities"
Dec 10 07:27:09 crc kubenswrapper[4765]: E1210 07:27:09.772440 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerName="extract-content"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.772456 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerName="extract-content"
Dec 10 07:27:09 crc kubenswrapper[4765]: E1210 07:27:09.772480 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerName="extract-utilities"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.772492 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerName="extract-utilities"
Dec 10 07:27:09 crc kubenswrapper[4765]: E1210 07:27:09.772517 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerName="registry-server"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.772528 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerName="registry-server"
Dec 10 07:27:09 crc kubenswrapper[4765]: E1210 07:27:09.772564 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerName="extract-content"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.772576 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerName="extract-content"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.772829 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e070d6e5-345d-4ab8-bb1d-b5a8e47dfd01" containerName="registry-server"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.772883 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="cea562f8-02be-4f64-b0b4-d8cfcb05d5af" containerName="registry-server"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.784965 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.836320 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-utilities\") pod \"redhat-marketplace-b5cdp\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") " pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.836407 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbhcb\" (UniqueName: \"kubernetes.io/projected/3a0cd5cf-55f5-4500-b467-9130463502e7-kube-api-access-qbhcb\") pod \"redhat-marketplace-b5cdp\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") " pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.836463 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-catalog-content\") pod \"redhat-marketplace-b5cdp\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") " pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.847794 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5cdp"]
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.938409 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-utilities\") pod \"redhat-marketplace-b5cdp\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") " pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.938495 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbhcb\" (UniqueName: \"kubernetes.io/projected/3a0cd5cf-55f5-4500-b467-9130463502e7-kube-api-access-qbhcb\") pod \"redhat-marketplace-b5cdp\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") " pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.938540 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-catalog-content\") pod \"redhat-marketplace-b5cdp\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") " pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.939161 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-utilities\") pod \"redhat-marketplace-b5cdp\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") " pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.939184 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-catalog-content\") pod \"redhat-marketplace-b5cdp\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") " pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:09 crc kubenswrapper[4765]: I1210 07:27:09.967719 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbhcb\" (UniqueName: \"kubernetes.io/projected/3a0cd5cf-55f5-4500-b467-9130463502e7-kube-api-access-qbhcb\") pod \"redhat-marketplace-b5cdp\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") " pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:10 crc kubenswrapper[4765]: I1210 07:27:10.151604 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:10 crc kubenswrapper[4765]: I1210 07:27:10.610139 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5cdp"]
Dec 10 07:27:11 crc kubenswrapper[4765]: I1210 07:27:11.060269 4765 generic.go:334] "Generic (PLEG): container finished" podID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerID="a15df67905df6936968f85f29913af6fa567c7a32f208c02143acd51f5c854f1" exitCode=0
Dec 10 07:27:11 crc kubenswrapper[4765]: I1210 07:27:11.060311 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5cdp" event={"ID":"3a0cd5cf-55f5-4500-b467-9130463502e7","Type":"ContainerDied","Data":"a15df67905df6936968f85f29913af6fa567c7a32f208c02143acd51f5c854f1"}
Dec 10 07:27:11 crc kubenswrapper[4765]: I1210 07:27:11.060338 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5cdp" event={"ID":"3a0cd5cf-55f5-4500-b467-9130463502e7","Type":"ContainerStarted","Data":"2c768ae61dd8bd4a6759009b1faf4cee10e52df1dd8164f468e3784c14de6278"}
Dec 10 07:27:11 crc kubenswrapper[4765]: I1210 07:27:11.062400 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 10 07:27:12 crc kubenswrapper[4765]: I1210 07:27:12.071605 4765 generic.go:334] "Generic (PLEG): container finished" podID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerID="3ed18f57e2ad0e320789afeafbc7d90872102e2bd94033b064d5e49ef0315473" exitCode=0
Dec 10 07:27:12 crc kubenswrapper[4765]: I1210 07:27:12.071718 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5cdp" event={"ID":"3a0cd5cf-55f5-4500-b467-9130463502e7","Type":"ContainerDied","Data":"3ed18f57e2ad0e320789afeafbc7d90872102e2bd94033b064d5e49ef0315473"}
Dec 10 07:27:13 crc kubenswrapper[4765]: I1210 07:27:13.081034 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5cdp" event={"ID":"3a0cd5cf-55f5-4500-b467-9130463502e7","Type":"ContainerStarted","Data":"08b3f9e1a84002be212ad7f19b101dd751a5f9d844202cef397bd52d46c203c1"}
Dec 10 07:27:13 crc kubenswrapper[4765]: I1210 07:27:13.098244 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b5cdp" podStartSLOduration=2.591921238 podStartE2EDuration="4.098204855s" podCreationTimestamp="2025-12-10 07:27:09 +0000 UTC" firstStartedPulling="2025-12-10 07:27:11.062163121 +0000 UTC m=+2350.788828437" lastFinishedPulling="2025-12-10 07:27:12.568446738 +0000 UTC m=+2352.295112054" observedRunningTime="2025-12-10 07:27:13.096917219 +0000 UTC m=+2352.823582545" watchObservedRunningTime="2025-12-10 07:27:13.098204855 +0000 UTC m=+2352.824870171"
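
The probe records for redhat-marketplace-b5cdp just below repeat the pattern seen for the other two catalog pods: readiness is reported as status="" (not yet evaluated) until the startup probe flips to "started", and only then does a real readiness result appear. A tiny sketch of that gating logic; the types are illustrative, not the kubelet's own.

    package probes

    // result mirrors the status strings in the log: "", "unhealthy", "ready".
    type result string

    // gate reports a readiness status for a container, holding it at ""
    // until the startup probe has succeeded -- matching the records here,
    // where readiness stays empty until startup reports "started".
    func gate(startupStarted bool, readinessCheck func() bool) result {
        if !startupStarted {
            return result("") // readiness not evaluated yet
        }
        if readinessCheck() {
            return result("ready")
        }
        return result("unhealthy")
    }
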
Dec 10 07:27:16 crc kubenswrapper[4765]: I1210 07:27:16.589657 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f"
Dec 10 07:27:16 crc kubenswrapper[4765]: E1210 07:27:16.590276 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:27:20 crc kubenswrapper[4765]: I1210 07:27:20.152668 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:20 crc kubenswrapper[4765]: I1210 07:27:20.153037 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:20 crc kubenswrapper[4765]: I1210 07:27:20.196775 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:21 crc kubenswrapper[4765]: I1210 07:27:21.182495 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:21 crc kubenswrapper[4765]: I1210 07:27:21.227837 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5cdp"]
Dec 10 07:27:23 crc kubenswrapper[4765]: I1210 07:27:23.158181 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b5cdp" podUID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerName="registry-server" containerID="cri-o://08b3f9e1a84002be212ad7f19b101dd751a5f9d844202cef397bd52d46c203c1" gracePeriod=2
Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.173622 4765 generic.go:334] "Generic (PLEG): container finished" podID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerID="08b3f9e1a84002be212ad7f19b101dd751a5f9d844202cef397bd52d46c203c1" exitCode=0
Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.173676 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5cdp" event={"ID":"3a0cd5cf-55f5-4500-b467-9130463502e7","Type":"ContainerDied","Data":"08b3f9e1a84002be212ad7f19b101dd751a5f9d844202cef397bd52d46c203c1"}
Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.248021 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5cdp"
Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.433411 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbhcb\" (UniqueName: \"kubernetes.io/projected/3a0cd5cf-55f5-4500-b467-9130463502e7-kube-api-access-qbhcb\") pod \"3a0cd5cf-55f5-4500-b467-9130463502e7\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") "
Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.433940 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-catalog-content\") pod \"3a0cd5cf-55f5-4500-b467-9130463502e7\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") "
Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.434061 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-utilities\") pod \"3a0cd5cf-55f5-4500-b467-9130463502e7\" (UID: \"3a0cd5cf-55f5-4500-b467-9130463502e7\") "
Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.443491 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-utilities" (OuterVolumeSpecName: "utilities") pod "3a0cd5cf-55f5-4500-b467-9130463502e7" (UID: "3a0cd5cf-55f5-4500-b467-9130463502e7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.454289 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a0cd5cf-55f5-4500-b467-9130463502e7-kube-api-access-qbhcb" (OuterVolumeSpecName: "kube-api-access-qbhcb") pod "3a0cd5cf-55f5-4500-b467-9130463502e7" (UID: "3a0cd5cf-55f5-4500-b467-9130463502e7"). InnerVolumeSpecName "kube-api-access-qbhcb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.496407 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a0cd5cf-55f5-4500-b467-9130463502e7" (UID: "3a0cd5cf-55f5-4500-b467-9130463502e7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.536721 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbhcb\" (UniqueName: \"kubernetes.io/projected/3a0cd5cf-55f5-4500-b467-9130463502e7-kube-api-access-qbhcb\") on node \"crc\" DevicePath \"\"" Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.536771 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:27:24 crc kubenswrapper[4765]: I1210 07:27:24.536782 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a0cd5cf-55f5-4500-b467-9130463502e7-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:27:25 crc kubenswrapper[4765]: I1210 07:27:25.183787 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b5cdp" event={"ID":"3a0cd5cf-55f5-4500-b467-9130463502e7","Type":"ContainerDied","Data":"2c768ae61dd8bd4a6759009b1faf4cee10e52df1dd8164f468e3784c14de6278"} Dec 10 07:27:25 crc kubenswrapper[4765]: I1210 07:27:25.183889 4765 scope.go:117] "RemoveContainer" containerID="08b3f9e1a84002be212ad7f19b101dd751a5f9d844202cef397bd52d46c203c1" Dec 10 07:27:25 crc kubenswrapper[4765]: I1210 07:27:25.183921 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b5cdp" Dec 10 07:27:25 crc kubenswrapper[4765]: I1210 07:27:25.211455 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5cdp"] Dec 10 07:27:25 crc kubenswrapper[4765]: I1210 07:27:25.212841 4765 scope.go:117] "RemoveContainer" containerID="3ed18f57e2ad0e320789afeafbc7d90872102e2bd94033b064d5e49ef0315473" Dec 10 07:27:25 crc kubenswrapper[4765]: I1210 07:27:25.218844 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b5cdp"] Dec 10 07:27:25 crc kubenswrapper[4765]: I1210 07:27:25.234489 4765 scope.go:117] "RemoveContainer" containerID="a15df67905df6936968f85f29913af6fa567c7a32f208c02143acd51f5c854f1" Dec 10 07:27:26 crc kubenswrapper[4765]: I1210 07:27:26.598876 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a0cd5cf-55f5-4500-b467-9130463502e7" path="/var/lib/kubelet/pods/3a0cd5cf-55f5-4500-b467-9130463502e7/volumes" Dec 10 07:27:30 crc kubenswrapper[4765]: I1210 07:27:30.592668 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:27:30 crc kubenswrapper[4765]: E1210 07:27:30.592952 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:27:42 crc kubenswrapper[4765]: I1210 07:27:42.589778 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:27:42 crc kubenswrapper[4765]: E1210 07:27:42.590527 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:27:56 crc kubenswrapper[4765]: I1210 07:27:56.588733 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:27:56 crc kubenswrapper[4765]: E1210 07:27:56.590368 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:28:10 crc kubenswrapper[4765]: I1210 07:28:10.592743 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:28:10 crc kubenswrapper[4765]: E1210 07:28:10.593494 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:28:21 crc kubenswrapper[4765]: I1210 07:28:21.588685 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:28:21 crc kubenswrapper[4765]: E1210 07:28:21.589481 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:28:36 crc kubenswrapper[4765]: I1210 07:28:36.588850 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:28:36 crc kubenswrapper[4765]: E1210 07:28:36.589731 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:28:48 crc kubenswrapper[4765]: I1210 07:28:48.589155 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:28:48 crc kubenswrapper[4765]: E1210 07:28:48.589939 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:29:01 crc kubenswrapper[4765]: I1210 07:29:01.588777 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:29:01 crc kubenswrapper[4765]: E1210 07:29:01.589740 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:29:15 crc kubenswrapper[4765]: I1210 07:29:15.589195 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:29:15 crc kubenswrapper[4765]: E1210 07:29:15.590353 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:29:29 crc kubenswrapper[4765]: I1210 07:29:29.589467 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:29:29 crc kubenswrapper[4765]: E1210 07:29:29.590345 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:29:41 crc kubenswrapper[4765]: I1210 07:29:41.589460 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:29:41 crc kubenswrapper[4765]: E1210 07:29:41.591343 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:29:52 crc kubenswrapper[4765]: I1210 07:29:52.591193 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:29:52 crc kubenswrapper[4765]: E1210 07:29:52.591968 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.146194 4765 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x"] Dec 10 07:30:00 crc kubenswrapper[4765]: E1210 07:30:00.146828 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerName="registry-server" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.146841 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerName="registry-server" Dec 10 07:30:00 crc kubenswrapper[4765]: E1210 07:30:00.146861 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerName="extract-content" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.146868 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerName="extract-content" Dec 10 07:30:00 crc kubenswrapper[4765]: E1210 07:30:00.146877 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerName="extract-utilities" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.146883 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerName="extract-utilities" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.147031 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a0cd5cf-55f5-4500-b467-9130463502e7" containerName="registry-server" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.147585 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.149804 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.150007 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.157681 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x"] Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.259114 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98f217d1-015d-487c-b8cf-8721eb0e2637-secret-volume\") pod \"collect-profiles-29422530-lgz5x\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.259193 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98f217d1-015d-487c-b8cf-8721eb0e2637-config-volume\") pod \"collect-profiles-29422530-lgz5x\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.259356 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xksjj\" (UniqueName: \"kubernetes.io/projected/98f217d1-015d-487c-b8cf-8721eb0e2637-kube-api-access-xksjj\") pod \"collect-profiles-29422530-lgz5x\" (UID: 
\"98f217d1-015d-487c-b8cf-8721eb0e2637\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.360608 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98f217d1-015d-487c-b8cf-8721eb0e2637-secret-volume\") pod \"collect-profiles-29422530-lgz5x\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.360689 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98f217d1-015d-487c-b8cf-8721eb0e2637-config-volume\") pod \"collect-profiles-29422530-lgz5x\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.360738 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xksjj\" (UniqueName: \"kubernetes.io/projected/98f217d1-015d-487c-b8cf-8721eb0e2637-kube-api-access-xksjj\") pod \"collect-profiles-29422530-lgz5x\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.361977 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98f217d1-015d-487c-b8cf-8721eb0e2637-config-volume\") pod \"collect-profiles-29422530-lgz5x\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.368366 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98f217d1-015d-487c-b8cf-8721eb0e2637-secret-volume\") pod \"collect-profiles-29422530-lgz5x\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.378435 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xksjj\" (UniqueName: \"kubernetes.io/projected/98f217d1-015d-487c-b8cf-8721eb0e2637-kube-api-access-xksjj\") pod \"collect-profiles-29422530-lgz5x\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.469528 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:00 crc kubenswrapper[4765]: I1210 07:30:00.891233 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x"] Dec 10 07:30:01 crc kubenswrapper[4765]: I1210 07:30:01.357433 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" event={"ID":"98f217d1-015d-487c-b8cf-8721eb0e2637","Type":"ContainerDied","Data":"1e2f9005553f43285b78eaf6a690872ecde591b649752e49c30317b90e11595d"} Dec 10 07:30:01 crc kubenswrapper[4765]: I1210 07:30:01.357391 4765 generic.go:334] "Generic (PLEG): container finished" podID="98f217d1-015d-487c-b8cf-8721eb0e2637" containerID="1e2f9005553f43285b78eaf6a690872ecde591b649752e49c30317b90e11595d" exitCode=0 Dec 10 07:30:01 crc kubenswrapper[4765]: I1210 07:30:01.357793 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" event={"ID":"98f217d1-015d-487c-b8cf-8721eb0e2637","Type":"ContainerStarted","Data":"f469129c6a6c57b7fe93ee2f7b89bc4b651c9d624a061f76f2d1a128d7a3a793"} Dec 10 07:30:02 crc kubenswrapper[4765]: I1210 07:30:02.655055 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:02 crc kubenswrapper[4765]: I1210 07:30:02.793426 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98f217d1-015d-487c-b8cf-8721eb0e2637-config-volume\") pod \"98f217d1-015d-487c-b8cf-8721eb0e2637\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " Dec 10 07:30:02 crc kubenswrapper[4765]: I1210 07:30:02.793583 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xksjj\" (UniqueName: \"kubernetes.io/projected/98f217d1-015d-487c-b8cf-8721eb0e2637-kube-api-access-xksjj\") pod \"98f217d1-015d-487c-b8cf-8721eb0e2637\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " Dec 10 07:30:02 crc kubenswrapper[4765]: I1210 07:30:02.793609 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98f217d1-015d-487c-b8cf-8721eb0e2637-secret-volume\") pod \"98f217d1-015d-487c-b8cf-8721eb0e2637\" (UID: \"98f217d1-015d-487c-b8cf-8721eb0e2637\") " Dec 10 07:30:02 crc kubenswrapper[4765]: I1210 07:30:02.794582 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98f217d1-015d-487c-b8cf-8721eb0e2637-config-volume" (OuterVolumeSpecName: "config-volume") pod "98f217d1-015d-487c-b8cf-8721eb0e2637" (UID: "98f217d1-015d-487c-b8cf-8721eb0e2637"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:30:02 crc kubenswrapper[4765]: I1210 07:30:02.795056 4765 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98f217d1-015d-487c-b8cf-8721eb0e2637-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 07:30:02 crc kubenswrapper[4765]: I1210 07:30:02.798919 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98f217d1-015d-487c-b8cf-8721eb0e2637-kube-api-access-xksjj" (OuterVolumeSpecName: "kube-api-access-xksjj") pod "98f217d1-015d-487c-b8cf-8721eb0e2637" (UID: "98f217d1-015d-487c-b8cf-8721eb0e2637"). InnerVolumeSpecName "kube-api-access-xksjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:30:02 crc kubenswrapper[4765]: I1210 07:30:02.799158 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98f217d1-015d-487c-b8cf-8721eb0e2637-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "98f217d1-015d-487c-b8cf-8721eb0e2637" (UID: "98f217d1-015d-487c-b8cf-8721eb0e2637"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 07:30:02 crc kubenswrapper[4765]: I1210 07:30:02.896442 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xksjj\" (UniqueName: \"kubernetes.io/projected/98f217d1-015d-487c-b8cf-8721eb0e2637-kube-api-access-xksjj\") on node \"crc\" DevicePath \"\"" Dec 10 07:30:02 crc kubenswrapper[4765]: I1210 07:30:02.896481 4765 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98f217d1-015d-487c-b8cf-8721eb0e2637-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 07:30:03 crc kubenswrapper[4765]: I1210 07:30:03.398345 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" event={"ID":"98f217d1-015d-487c-b8cf-8721eb0e2637","Type":"ContainerDied","Data":"f469129c6a6c57b7fe93ee2f7b89bc4b651c9d624a061f76f2d1a128d7a3a793"} Dec 10 07:30:03 crc kubenswrapper[4765]: I1210 07:30:03.419431 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422530-lgz5x" Dec 10 07:30:03 crc kubenswrapper[4765]: I1210 07:30:03.419651 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f469129c6a6c57b7fe93ee2f7b89bc4b651c9d624a061f76f2d1a128d7a3a793" Dec 10 07:30:03 crc kubenswrapper[4765]: I1210 07:30:03.589644 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:30:03 crc kubenswrapper[4765]: E1210 07:30:03.590148 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:30:03 crc kubenswrapper[4765]: I1210 07:30:03.728941 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm"] Dec 10 07:30:03 crc kubenswrapper[4765]: I1210 07:30:03.736036 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422485-p8qrm"] Dec 10 07:30:04 crc kubenswrapper[4765]: I1210 07:30:04.599146 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c444ed13-88e9-41b6-a7d6-c1fa51cb7e01" path="/var/lib/kubelet/pods/c444ed13-88e9-41b6-a7d6-c1fa51cb7e01/volumes" Dec 10 07:30:14 crc kubenswrapper[4765]: I1210 07:30:14.589567 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:30:14 crc kubenswrapper[4765]: E1210 07:30:14.590210 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:30:25 crc kubenswrapper[4765]: I1210 07:30:25.590201 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:30:25 crc kubenswrapper[4765]: E1210 07:30:25.591185 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:30:29 crc kubenswrapper[4765]: I1210 07:30:29.542412 4765 scope.go:117] "RemoveContainer" containerID="8b3c9917c429f0d653cc4a6fab177b0a187ef6e29fed5e5b430fe485fe946084" Dec 10 07:30:36 crc kubenswrapper[4765]: I1210 07:30:36.589342 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:30:37 crc kubenswrapper[4765]: I1210 07:30:37.688543 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" 
event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"936bdb47f341f43c2b82cb52710879031640eca8fb8ab90e2134c80f74e37ff0"} Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.222761 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8vl2k"] Dec 10 07:32:09 crc kubenswrapper[4765]: E1210 07:32:09.223813 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f217d1-015d-487c-b8cf-8721eb0e2637" containerName="collect-profiles" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.223832 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f217d1-015d-487c-b8cf-8721eb0e2637" containerName="collect-profiles" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.224046 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="98f217d1-015d-487c-b8cf-8721eb0e2637" containerName="collect-profiles" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.225617 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.241249 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8vl2k"] Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.352826 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwnq6\" (UniqueName: \"kubernetes.io/projected/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-kube-api-access-kwnq6\") pod \"redhat-operators-8vl2k\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.352937 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-catalog-content\") pod \"redhat-operators-8vl2k\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.352978 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-utilities\") pod \"redhat-operators-8vl2k\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.455046 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwnq6\" (UniqueName: \"kubernetes.io/projected/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-kube-api-access-kwnq6\") pod \"redhat-operators-8vl2k\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.455221 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-catalog-content\") pod \"redhat-operators-8vl2k\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.455252 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-utilities\") 
pod \"redhat-operators-8vl2k\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.455782 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-utilities\") pod \"redhat-operators-8vl2k\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.455843 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-catalog-content\") pod \"redhat-operators-8vl2k\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.478191 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwnq6\" (UniqueName: \"kubernetes.io/projected/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-kube-api-access-kwnq6\") pod \"redhat-operators-8vl2k\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.548338 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:09 crc kubenswrapper[4765]: I1210 07:32:09.982116 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8vl2k"] Dec 10 07:32:10 crc kubenswrapper[4765]: I1210 07:32:10.399234 4765 generic.go:334] "Generic (PLEG): container finished" podID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" containerID="e98375facfa3b93c3406946101086817bc4de17d133e0a0b7b9c810bc3656e63" exitCode=0 Dec 10 07:32:10 crc kubenswrapper[4765]: I1210 07:32:10.399294 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8vl2k" event={"ID":"64e4a2d0-e85f-41ba-bfb3-f3997766a85c","Type":"ContainerDied","Data":"e98375facfa3b93c3406946101086817bc4de17d133e0a0b7b9c810bc3656e63"} Dec 10 07:32:10 crc kubenswrapper[4765]: I1210 07:32:10.399327 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8vl2k" event={"ID":"64e4a2d0-e85f-41ba-bfb3-f3997766a85c","Type":"ContainerStarted","Data":"287111a71fcc31f8bbb7f29a3a86968c8c8dbb2e1989342950b58a9e6154dca0"} Dec 10 07:32:11 crc kubenswrapper[4765]: I1210 07:32:11.410535 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8vl2k" event={"ID":"64e4a2d0-e85f-41ba-bfb3-f3997766a85c","Type":"ContainerStarted","Data":"f0b1de30f63da30e94c74edc216044ec86534990d5c66f84cc3b6073b62cab41"} Dec 10 07:32:12 crc kubenswrapper[4765]: I1210 07:32:12.419642 4765 generic.go:334] "Generic (PLEG): container finished" podID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" containerID="f0b1de30f63da30e94c74edc216044ec86534990d5c66f84cc3b6073b62cab41" exitCode=0 Dec 10 07:32:12 crc kubenswrapper[4765]: I1210 07:32:12.419692 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8vl2k" event={"ID":"64e4a2d0-e85f-41ba-bfb3-f3997766a85c","Type":"ContainerDied","Data":"f0b1de30f63da30e94c74edc216044ec86534990d5c66f84cc3b6073b62cab41"} Dec 10 07:32:12 crc kubenswrapper[4765]: I1210 07:32:12.421915 4765 provider.go:102] Refreshing cache for provider: 
*credentialprovider.defaultDockerConfigProvider Dec 10 07:32:13 crc kubenswrapper[4765]: I1210 07:32:13.431078 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8vl2k" event={"ID":"64e4a2d0-e85f-41ba-bfb3-f3997766a85c","Type":"ContainerStarted","Data":"e47d48b4dd8456ba3f98eaa9889657c69af3456fc4196d6710170154f85b92ea"} Dec 10 07:32:13 crc kubenswrapper[4765]: I1210 07:32:13.452458 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8vl2k" podStartSLOduration=1.843394371 podStartE2EDuration="4.452435291s" podCreationTimestamp="2025-12-10 07:32:09 +0000 UTC" firstStartedPulling="2025-12-10 07:32:10.401204586 +0000 UTC m=+2650.127869892" lastFinishedPulling="2025-12-10 07:32:13.010245496 +0000 UTC m=+2652.736910812" observedRunningTime="2025-12-10 07:32:13.45064108 +0000 UTC m=+2653.177306396" watchObservedRunningTime="2025-12-10 07:32:13.452435291 +0000 UTC m=+2653.179100607" Dec 10 07:32:19 crc kubenswrapper[4765]: I1210 07:32:19.549075 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:19 crc kubenswrapper[4765]: I1210 07:32:19.550146 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:19 crc kubenswrapper[4765]: I1210 07:32:19.602680 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:20 crc kubenswrapper[4765]: I1210 07:32:20.529159 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:20 crc kubenswrapper[4765]: I1210 07:32:20.573265 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8vl2k"] Dec 10 07:32:22 crc kubenswrapper[4765]: I1210 07:32:22.497516 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8vl2k" podUID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" containerName="registry-server" containerID="cri-o://e47d48b4dd8456ba3f98eaa9889657c69af3456fc4196d6710170154f85b92ea" gracePeriod=2 Dec 10 07:32:24 crc kubenswrapper[4765]: I1210 07:32:24.520907 4765 generic.go:334] "Generic (PLEG): container finished" podID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" containerID="e47d48b4dd8456ba3f98eaa9889657c69af3456fc4196d6710170154f85b92ea" exitCode=0 Dec 10 07:32:24 crc kubenswrapper[4765]: I1210 07:32:24.520998 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8vl2k" event={"ID":"64e4a2d0-e85f-41ba-bfb3-f3997766a85c","Type":"ContainerDied","Data":"e47d48b4dd8456ba3f98eaa9889657c69af3456fc4196d6710170154f85b92ea"} Dec 10 07:32:24 crc kubenswrapper[4765]: I1210 07:32:24.783384 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:24 crc kubenswrapper[4765]: I1210 07:32:24.899054 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-catalog-content\") pod \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " Dec 10 07:32:24 crc kubenswrapper[4765]: I1210 07:32:24.899240 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-utilities\") pod \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " Dec 10 07:32:24 crc kubenswrapper[4765]: I1210 07:32:24.899375 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwnq6\" (UniqueName: \"kubernetes.io/projected/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-kube-api-access-kwnq6\") pod \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\" (UID: \"64e4a2d0-e85f-41ba-bfb3-f3997766a85c\") " Dec 10 07:32:24 crc kubenswrapper[4765]: I1210 07:32:24.900333 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-utilities" (OuterVolumeSpecName: "utilities") pod "64e4a2d0-e85f-41ba-bfb3-f3997766a85c" (UID: "64e4a2d0-e85f-41ba-bfb3-f3997766a85c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:32:24 crc kubenswrapper[4765]: I1210 07:32:24.904953 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-kube-api-access-kwnq6" (OuterVolumeSpecName: "kube-api-access-kwnq6") pod "64e4a2d0-e85f-41ba-bfb3-f3997766a85c" (UID: "64e4a2d0-e85f-41ba-bfb3-f3997766a85c"). InnerVolumeSpecName "kube-api-access-kwnq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.001196 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.001231 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwnq6\" (UniqueName: \"kubernetes.io/projected/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-kube-api-access-kwnq6\") on node \"crc\" DevicePath \"\"" Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.029953 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "64e4a2d0-e85f-41ba-bfb3-f3997766a85c" (UID: "64e4a2d0-e85f-41ba-bfb3-f3997766a85c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.102462 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64e4a2d0-e85f-41ba-bfb3-f3997766a85c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.533806 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8vl2k" event={"ID":"64e4a2d0-e85f-41ba-bfb3-f3997766a85c","Type":"ContainerDied","Data":"287111a71fcc31f8bbb7f29a3a86968c8c8dbb2e1989342950b58a9e6154dca0"} Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.534205 4765 scope.go:117] "RemoveContainer" containerID="e47d48b4dd8456ba3f98eaa9889657c69af3456fc4196d6710170154f85b92ea" Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.533908 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8vl2k" Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.568790 4765 scope.go:117] "RemoveContainer" containerID="f0b1de30f63da30e94c74edc216044ec86534990d5c66f84cc3b6073b62cab41" Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.585277 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8vl2k"] Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.588254 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8vl2k"] Dec 10 07:32:25 crc kubenswrapper[4765]: I1210 07:32:25.590781 4765 scope.go:117] "RemoveContainer" containerID="e98375facfa3b93c3406946101086817bc4de17d133e0a0b7b9c810bc3656e63" Dec 10 07:32:26 crc kubenswrapper[4765]: I1210 07:32:26.600650 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" path="/var/lib/kubelet/pods/64e4a2d0-e85f-41ba-bfb3-f3997766a85c/volumes" Dec 10 07:33:04 crc kubenswrapper[4765]: I1210 07:33:04.049525 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:33:04 crc kubenswrapper[4765]: I1210 07:33:04.050142 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:33:34 crc kubenswrapper[4765]: I1210 07:33:34.049686 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:33:34 crc kubenswrapper[4765]: I1210 07:33:34.050357 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:34:04 crc kubenswrapper[4765]: I1210 07:34:04.049732 4765 patch_prober.go:28] 
interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:34:04 crc kubenswrapper[4765]: I1210 07:34:04.051530 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:34:04 crc kubenswrapper[4765]: I1210 07:34:04.051692 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 07:34:04 crc kubenswrapper[4765]: I1210 07:34:04.335735 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"936bdb47f341f43c2b82cb52710879031640eca8fb8ab90e2134c80f74e37ff0"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 07:34:04 crc kubenswrapper[4765]: I1210 07:34:04.335822 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://936bdb47f341f43c2b82cb52710879031640eca8fb8ab90e2134c80f74e37ff0" gracePeriod=600 Dec 10 07:34:05 crc kubenswrapper[4765]: I1210 07:34:05.349022 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="936bdb47f341f43c2b82cb52710879031640eca8fb8ab90e2134c80f74e37ff0" exitCode=0 Dec 10 07:34:05 crc kubenswrapper[4765]: I1210 07:34:05.349115 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"936bdb47f341f43c2b82cb52710879031640eca8fb8ab90e2134c80f74e37ff0"} Dec 10 07:34:05 crc kubenswrapper[4765]: I1210 07:34:05.349649 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"} Dec 10 07:34:05 crc kubenswrapper[4765]: I1210 07:34:05.349683 4765 scope.go:117] "RemoveContainer" containerID="af5d80c6aeafe0acc9522179a1b497662d985c3fbedd05a4cfdf25ac550cb75f" Dec 10 07:35:13 crc kubenswrapper[4765]: I1210 07:35:13.813691 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hk658"] Dec 10 07:35:13 crc kubenswrapper[4765]: E1210 07:35:13.816153 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" containerName="extract-content" Dec 10 07:35:13 crc kubenswrapper[4765]: I1210 07:35:13.816283 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" containerName="extract-content" Dec 10 07:35:13 crc kubenswrapper[4765]: E1210 07:35:13.816404 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" 
containerName="registry-server" Dec 10 07:35:13 crc kubenswrapper[4765]: I1210 07:35:13.816493 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" containerName="registry-server" Dec 10 07:35:13 crc kubenswrapper[4765]: E1210 07:35:13.816680 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" containerName="extract-utilities" Dec 10 07:35:13 crc kubenswrapper[4765]: I1210 07:35:13.816894 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" containerName="extract-utilities" Dec 10 07:35:13 crc kubenswrapper[4765]: I1210 07:35:13.817506 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e4a2d0-e85f-41ba-bfb3-f3997766a85c" containerName="registry-server" Dec 10 07:35:13 crc kubenswrapper[4765]: I1210 07:35:13.819066 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:13 crc kubenswrapper[4765]: I1210 07:35:13.826423 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hk658"] Dec 10 07:35:13 crc kubenswrapper[4765]: I1210 07:35:13.978681 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-catalog-content\") pod \"community-operators-hk658\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:13 crc kubenswrapper[4765]: I1210 07:35:13.978774 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-utilities\") pod \"community-operators-hk658\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:13 crc kubenswrapper[4765]: I1210 07:35:13.978927 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz8lj\" (UniqueName: \"kubernetes.io/projected/a34c64f4-1191-4bb0-bb93-627119126f31-kube-api-access-zz8lj\") pod \"community-operators-hk658\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:14 crc kubenswrapper[4765]: I1210 07:35:14.080166 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-catalog-content\") pod \"community-operators-hk658\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:14 crc kubenswrapper[4765]: I1210 07:35:14.080582 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-utilities\") pod \"community-operators-hk658\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:14 crc kubenswrapper[4765]: I1210 07:35:14.080690 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz8lj\" (UniqueName: \"kubernetes.io/projected/a34c64f4-1191-4bb0-bb93-627119126f31-kube-api-access-zz8lj\") pod \"community-operators-hk658\" (UID: 
\"a34c64f4-1191-4bb0-bb93-627119126f31\") " pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:14 crc kubenswrapper[4765]: I1210 07:35:14.080947 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-catalog-content\") pod \"community-operators-hk658\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:14 crc kubenswrapper[4765]: I1210 07:35:14.081131 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-utilities\") pod \"community-operators-hk658\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:14 crc kubenswrapper[4765]: I1210 07:35:14.111537 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz8lj\" (UniqueName: \"kubernetes.io/projected/a34c64f4-1191-4bb0-bb93-627119126f31-kube-api-access-zz8lj\") pod \"community-operators-hk658\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:14 crc kubenswrapper[4765]: I1210 07:35:14.199462 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:14 crc kubenswrapper[4765]: I1210 07:35:14.745598 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hk658"] Dec 10 07:35:14 crc kubenswrapper[4765]: W1210 07:35:14.750724 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda34c64f4_1191_4bb0_bb93_627119126f31.slice/crio-7e2805156bb41afc81b5485bb8b19f72961233bda806525e84cea47dbf5604fd WatchSource:0}: Error finding container 7e2805156bb41afc81b5485bb8b19f72961233bda806525e84cea47dbf5604fd: Status 404 returned error can't find the container with id 7e2805156bb41afc81b5485bb8b19f72961233bda806525e84cea47dbf5604fd Dec 10 07:35:14 crc kubenswrapper[4765]: I1210 07:35:14.925731 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk658" event={"ID":"a34c64f4-1191-4bb0-bb93-627119126f31","Type":"ContainerStarted","Data":"7e2805156bb41afc81b5485bb8b19f72961233bda806525e84cea47dbf5604fd"} Dec 10 07:35:15 crc kubenswrapper[4765]: I1210 07:35:15.935910 4765 generic.go:334] "Generic (PLEG): container finished" podID="a34c64f4-1191-4bb0-bb93-627119126f31" containerID="ffef1480a340ab89e32b3fdb38a4aaa3ce0d2a5aad01e533d474ee2c49808ac2" exitCode=0 Dec 10 07:35:15 crc kubenswrapper[4765]: I1210 07:35:15.935958 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk658" event={"ID":"a34c64f4-1191-4bb0-bb93-627119126f31","Type":"ContainerDied","Data":"ffef1480a340ab89e32b3fdb38a4aaa3ce0d2a5aad01e533d474ee2c49808ac2"} Dec 10 07:35:17 crc kubenswrapper[4765]: I1210 07:35:17.956228 4765 generic.go:334] "Generic (PLEG): container finished" podID="a34c64f4-1191-4bb0-bb93-627119126f31" containerID="fa6f9e8959d83f69ecacd133184ccdda76937c257ccde74d08b8725255bc79a9" exitCode=0 Dec 10 07:35:17 crc kubenswrapper[4765]: I1210 07:35:17.956292 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk658" 
event={"ID":"a34c64f4-1191-4bb0-bb93-627119126f31","Type":"ContainerDied","Data":"fa6f9e8959d83f69ecacd133184ccdda76937c257ccde74d08b8725255bc79a9"} Dec 10 07:35:19 crc kubenswrapper[4765]: I1210 07:35:19.975377 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk658" event={"ID":"a34c64f4-1191-4bb0-bb93-627119126f31","Type":"ContainerStarted","Data":"0adcf8638b19ddd14763498ec1f21fbd0bdb67193d84642cbb330cbdaae6519f"} Dec 10 07:35:20 crc kubenswrapper[4765]: I1210 07:35:20.012813 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hk658" podStartSLOduration=3.385571793 podStartE2EDuration="7.012773719s" podCreationTimestamp="2025-12-10 07:35:13 +0000 UTC" firstStartedPulling="2025-12-10 07:35:15.938003494 +0000 UTC m=+2835.664668810" lastFinishedPulling="2025-12-10 07:35:19.56520542 +0000 UTC m=+2839.291870736" observedRunningTime="2025-12-10 07:35:20.005458491 +0000 UTC m=+2839.732123827" watchObservedRunningTime="2025-12-10 07:35:20.012773719 +0000 UTC m=+2839.739439035" Dec 10 07:35:24 crc kubenswrapper[4765]: I1210 07:35:24.199897 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:24 crc kubenswrapper[4765]: I1210 07:35:24.200207 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:24 crc kubenswrapper[4765]: I1210 07:35:24.252443 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:25 crc kubenswrapper[4765]: I1210 07:35:25.057126 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:25 crc kubenswrapper[4765]: I1210 07:35:25.109721 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hk658"] Dec 10 07:35:27 crc kubenswrapper[4765]: I1210 07:35:27.025988 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hk658" podUID="a34c64f4-1191-4bb0-bb93-627119126f31" containerName="registry-server" containerID="cri-o://0adcf8638b19ddd14763498ec1f21fbd0bdb67193d84642cbb330cbdaae6519f" gracePeriod=2 Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.062858 4765 generic.go:334] "Generic (PLEG): container finished" podID="a34c64f4-1191-4bb0-bb93-627119126f31" containerID="0adcf8638b19ddd14763498ec1f21fbd0bdb67193d84642cbb330cbdaae6519f" exitCode=0 Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.062950 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk658" event={"ID":"a34c64f4-1191-4bb0-bb93-627119126f31","Type":"ContainerDied","Data":"0adcf8638b19ddd14763498ec1f21fbd0bdb67193d84642cbb330cbdaae6519f"} Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.297921 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hk658" Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.299775 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-catalog-content\") pod \"a34c64f4-1191-4bb0-bb93-627119126f31\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.299935 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz8lj\" (UniqueName: \"kubernetes.io/projected/a34c64f4-1191-4bb0-bb93-627119126f31-kube-api-access-zz8lj\") pod \"a34c64f4-1191-4bb0-bb93-627119126f31\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.300021 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-utilities\") pod \"a34c64f4-1191-4bb0-bb93-627119126f31\" (UID: \"a34c64f4-1191-4bb0-bb93-627119126f31\") " Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.307540 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-utilities" (OuterVolumeSpecName: "utilities") pod "a34c64f4-1191-4bb0-bb93-627119126f31" (UID: "a34c64f4-1191-4bb0-bb93-627119126f31"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.309590 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a34c64f4-1191-4bb0-bb93-627119126f31-kube-api-access-zz8lj" (OuterVolumeSpecName: "kube-api-access-zz8lj") pod "a34c64f4-1191-4bb0-bb93-627119126f31" (UID: "a34c64f4-1191-4bb0-bb93-627119126f31"). InnerVolumeSpecName "kube-api-access-zz8lj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.369780 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a34c64f4-1191-4bb0-bb93-627119126f31" (UID: "a34c64f4-1191-4bb0-bb93-627119126f31"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.402074 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz8lj\" (UniqueName: \"kubernetes.io/projected/a34c64f4-1191-4bb0-bb93-627119126f31-kube-api-access-zz8lj\") on node \"crc\" DevicePath \"\""
Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.402150 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 07:35:29 crc kubenswrapper[4765]: I1210 07:35:29.402164 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a34c64f4-1191-4bb0-bb93-627119126f31-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 07:35:30 crc kubenswrapper[4765]: I1210 07:35:30.075975 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk658" event={"ID":"a34c64f4-1191-4bb0-bb93-627119126f31","Type":"ContainerDied","Data":"7e2805156bb41afc81b5485bb8b19f72961233bda806525e84cea47dbf5604fd"}
Dec 10 07:35:30 crc kubenswrapper[4765]: I1210 07:35:30.076058 4765 scope.go:117] "RemoveContainer" containerID="0adcf8638b19ddd14763498ec1f21fbd0bdb67193d84642cbb330cbdaae6519f"
Dec 10 07:35:30 crc kubenswrapper[4765]: I1210 07:35:30.076067 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hk658"
Dec 10 07:35:30 crc kubenswrapper[4765]: I1210 07:35:30.114159 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hk658"]
Dec 10 07:35:30 crc kubenswrapper[4765]: I1210 07:35:30.118332 4765 scope.go:117] "RemoveContainer" containerID="fa6f9e8959d83f69ecacd133184ccdda76937c257ccde74d08b8725255bc79a9"
Dec 10 07:35:30 crc kubenswrapper[4765]: I1210 07:35:30.124866 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hk658"]
Dec 10 07:35:30 crc kubenswrapper[4765]: I1210 07:35:30.149261 4765 scope.go:117] "RemoveContainer" containerID="ffef1480a340ab89e32b3fdb38a4aaa3ce0d2a5aad01e533d474ee2c49808ac2"
Dec 10 07:35:30 crc kubenswrapper[4765]: I1210 07:35:30.599367 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a34c64f4-1191-4bb0-bb93-627119126f31" path="/var/lib/kubelet/pods/a34c64f4-1191-4bb0-bb93-627119126f31/volumes"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.049748 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.050357 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.656021 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-km5jt"]
Dec 10 07:36:34 crc kubenswrapper[4765]: E1210 07:36:34.656465 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a34c64f4-1191-4bb0-bb93-627119126f31" containerName="extract-utilities"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.656487 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a34c64f4-1191-4bb0-bb93-627119126f31" containerName="extract-utilities"
Dec 10 07:36:34 crc kubenswrapper[4765]: E1210 07:36:34.656516 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a34c64f4-1191-4bb0-bb93-627119126f31" containerName="extract-content"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.656525 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a34c64f4-1191-4bb0-bb93-627119126f31" containerName="extract-content"
Dec 10 07:36:34 crc kubenswrapper[4765]: E1210 07:36:34.656550 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a34c64f4-1191-4bb0-bb93-627119126f31" containerName="registry-server"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.656558 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a34c64f4-1191-4bb0-bb93-627119126f31" containerName="registry-server"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.656794 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a34c64f4-1191-4bb0-bb93-627119126f31" containerName="registry-server"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.658188 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.661564 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-km5jt"]
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.781505 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-utilities\") pod \"certified-operators-km5jt\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") " pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.781585 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-catalog-content\") pod \"certified-operators-km5jt\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") " pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.781847 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bcm5\" (UniqueName: \"kubernetes.io/projected/163bd9d4-8584-4a44-9433-76987d748d3f-kube-api-access-7bcm5\") pod \"certified-operators-km5jt\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") " pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.883957 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bcm5\" (UniqueName: \"kubernetes.io/projected/163bd9d4-8584-4a44-9433-76987d748d3f-kube-api-access-7bcm5\") pod \"certified-operators-km5jt\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") " pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.884115 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-utilities\") pod \"certified-operators-km5jt\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") " pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.884164 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-catalog-content\") pod \"certified-operators-km5jt\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") " pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.884781 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-catalog-content\") pod \"certified-operators-km5jt\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") " pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.884896 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-utilities\") pod \"certified-operators-km5jt\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") " pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:34 crc kubenswrapper[4765]: I1210 07:36:34.906289 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bcm5\" (UniqueName: \"kubernetes.io/projected/163bd9d4-8584-4a44-9433-76987d748d3f-kube-api-access-7bcm5\") pod \"certified-operators-km5jt\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") " pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:35 crc kubenswrapper[4765]: I1210 07:36:35.010018 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:35 crc kubenswrapper[4765]: I1210 07:36:35.482968 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-km5jt"]
Dec 10 07:36:35 crc kubenswrapper[4765]: I1210 07:36:35.578963 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-km5jt" event={"ID":"163bd9d4-8584-4a44-9433-76987d748d3f","Type":"ContainerStarted","Data":"082d198a59c5b6a60e9ff25d6c1a9667a91d31d7b3721ab9992724e33f46781e"}
Dec 10 07:36:36 crc kubenswrapper[4765]: I1210 07:36:36.589555 4765 generic.go:334] "Generic (PLEG): container finished" podID="163bd9d4-8584-4a44-9433-76987d748d3f" containerID="b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82" exitCode=0
Dec 10 07:36:36 crc kubenswrapper[4765]: I1210 07:36:36.598583 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-km5jt" event={"ID":"163bd9d4-8584-4a44-9433-76987d748d3f","Type":"ContainerDied","Data":"b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82"}
Dec 10 07:36:39 crc kubenswrapper[4765]: I1210 07:36:39.614949 4765 generic.go:334] "Generic (PLEG): container finished" podID="163bd9d4-8584-4a44-9433-76987d748d3f" containerID="74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b" exitCode=0
Dec 10 07:36:39 crc kubenswrapper[4765]: I1210 07:36:39.615059 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-km5jt" event={"ID":"163bd9d4-8584-4a44-9433-76987d748d3f","Type":"ContainerDied","Data":"74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b"}
Dec 10 07:36:41 crc kubenswrapper[4765]: I1210 07:36:41.647671 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-km5jt" event={"ID":"163bd9d4-8584-4a44-9433-76987d748d3f","Type":"ContainerStarted","Data":"b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227"}
Dec 10 07:36:41 crc kubenswrapper[4765]: I1210 07:36:41.672046 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-km5jt" podStartSLOduration=3.698450175 podStartE2EDuration="7.672027525s" podCreationTimestamp="2025-12-10 07:36:34 +0000 UTC" firstStartedPulling="2025-12-10 07:36:36.590719296 +0000 UTC m=+2916.317384612" lastFinishedPulling="2025-12-10 07:36:40.564296646 +0000 UTC m=+2920.290961962" observedRunningTime="2025-12-10 07:36:41.665939381 +0000 UTC m=+2921.392604697" watchObservedRunningTime="2025-12-10 07:36:41.672027525 +0000 UTC m=+2921.398692841"
Dec 10 07:36:45 crc kubenswrapper[4765]: I1210 07:36:45.010659 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:45 crc kubenswrapper[4765]: I1210 07:36:45.014309 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:45 crc kubenswrapper[4765]: I1210 07:36:45.055622 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:45 crc kubenswrapper[4765]: I1210 07:36:45.722458 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:48 crc kubenswrapper[4765]: I1210 07:36:48.040445 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-km5jt"]
Dec 10 07:36:48 crc kubenswrapper[4765]: I1210 07:36:48.699673 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-km5jt" podUID="163bd9d4-8584-4a44-9433-76987d748d3f" containerName="registry-server" containerID="cri-o://b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227" gracePeriod=2
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.627868 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.710038 4765 generic.go:334] "Generic (PLEG): container finished" podID="163bd9d4-8584-4a44-9433-76987d748d3f" containerID="b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227" exitCode=0
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.710110 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-km5jt" event={"ID":"163bd9d4-8584-4a44-9433-76987d748d3f","Type":"ContainerDied","Data":"b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227"}
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.710147 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-km5jt" event={"ID":"163bd9d4-8584-4a44-9433-76987d748d3f","Type":"ContainerDied","Data":"082d198a59c5b6a60e9ff25d6c1a9667a91d31d7b3721ab9992724e33f46781e"}
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.710168 4765 scope.go:117] "RemoveContainer" containerID="b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.710331 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-km5jt"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.723262 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bcm5\" (UniqueName: \"kubernetes.io/projected/163bd9d4-8584-4a44-9433-76987d748d3f-kube-api-access-7bcm5\") pod \"163bd9d4-8584-4a44-9433-76987d748d3f\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") "
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.723409 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-catalog-content\") pod \"163bd9d4-8584-4a44-9433-76987d748d3f\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") "
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.723547 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-utilities\") pod \"163bd9d4-8584-4a44-9433-76987d748d3f\" (UID: \"163bd9d4-8584-4a44-9433-76987d748d3f\") "
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.724910 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-utilities" (OuterVolumeSpecName: "utilities") pod "163bd9d4-8584-4a44-9433-76987d748d3f" (UID: "163bd9d4-8584-4a44-9433-76987d748d3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.732749 4765 scope.go:117] "RemoveContainer" containerID="74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.734243 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/163bd9d4-8584-4a44-9433-76987d748d3f-kube-api-access-7bcm5" (OuterVolumeSpecName: "kube-api-access-7bcm5") pod "163bd9d4-8584-4a44-9433-76987d748d3f" (UID: "163bd9d4-8584-4a44-9433-76987d748d3f"). InnerVolumeSpecName "kube-api-access-7bcm5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.776255 4765 scope.go:117] "RemoveContainer" containerID="b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.785217 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "163bd9d4-8584-4a44-9433-76987d748d3f" (UID: "163bd9d4-8584-4a44-9433-76987d748d3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.802172 4765 scope.go:117] "RemoveContainer" containerID="b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227"
Dec 10 07:36:49 crc kubenswrapper[4765]: E1210 07:36:49.802645 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227\": container with ID starting with b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227 not found: ID does not exist" containerID="b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.802694 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227"} err="failed to get container status \"b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227\": rpc error: code = NotFound desc = could not find container \"b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227\": container with ID starting with b7ac32fcff9d80e37f9a4f6cc3ceccbfa835f28a04dd55b64051a661cdb08227 not found: ID does not exist"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.802760 4765 scope.go:117] "RemoveContainer" containerID="74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b"
Dec 10 07:36:49 crc kubenswrapper[4765]: E1210 07:36:49.803008 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b\": container with ID starting with 74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b not found: ID does not exist" containerID="74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.803037 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b"} err="failed to get container status \"74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b\": rpc error: code = NotFound desc = could not find container \"74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b\": container with ID starting with 74c946717c00fcc76421a91506f3c89222f231019b19562a0bb2b17dfa53209b not found: ID does not exist"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.803055 4765 scope.go:117] "RemoveContainer" containerID="b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82"
Dec 10 07:36:49 crc kubenswrapper[4765]: E1210 07:36:49.803389 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82\": container with ID starting with b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82 not found: ID does not exist" containerID="b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.803420 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82"} err="failed to get container status \"b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82\": rpc error: code = NotFound desc = could not find container \"b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82\": container with ID starting with b6cc0c5019dae11379152d154876f62ffc45ff460de4933912398e253b083e82 not found: ID does not exist"
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.824846 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.824894 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163bd9d4-8584-4a44-9433-76987d748d3f-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 07:36:49 crc kubenswrapper[4765]: I1210 07:36:49.824908 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bcm5\" (UniqueName: \"kubernetes.io/projected/163bd9d4-8584-4a44-9433-76987d748d3f-kube-api-access-7bcm5\") on node \"crc\" DevicePath \"\""
Dec 10 07:36:50 crc kubenswrapper[4765]: I1210 07:36:50.047171 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-km5jt"]
Dec 10 07:36:50 crc kubenswrapper[4765]: I1210 07:36:50.052788 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-km5jt"]
Dec 10 07:36:50 crc kubenswrapper[4765]: I1210 07:36:50.598868 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="163bd9d4-8584-4a44-9433-76987d748d3f" path="/var/lib/kubelet/pods/163bd9d4-8584-4a44-9433-76987d748d3f/volumes"
Dec 10 07:37:04 crc kubenswrapper[4765]: I1210 07:37:04.049237 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 07:37:04 crc kubenswrapper[4765]: I1210 07:37:04.049915 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 07:37:34 crc kubenswrapper[4765]: I1210 07:37:34.050121 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 07:37:34 crc kubenswrapper[4765]: I1210 07:37:34.050772 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 07:37:34 crc kubenswrapper[4765]: I1210 07:37:34.050834 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w"
Dec 10 07:37:34 crc kubenswrapper[4765]: I1210 07:37:34.051620 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 10 07:37:34 crc kubenswrapper[4765]: I1210 07:37:34.051692 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97" gracePeriod=600
Dec 10 07:37:34 crc kubenswrapper[4765]: E1210 07:37:34.186817 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:37:35 crc kubenswrapper[4765]: I1210 07:37:35.111281 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97" exitCode=0
Dec 10 07:37:35 crc kubenswrapper[4765]: I1210 07:37:35.111366 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"}
Dec 10 07:37:35 crc kubenswrapper[4765]: I1210 07:37:35.111650 4765 scope.go:117] "RemoveContainer" containerID="936bdb47f341f43c2b82cb52710879031640eca8fb8ab90e2134c80f74e37ff0"
Dec 10 07:37:35 crc kubenswrapper[4765]: I1210 07:37:35.112607 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:37:35 crc kubenswrapper[4765]: E1210 07:37:35.112886 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:37:47 crc kubenswrapper[4765]: I1210 07:37:47.589171 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:37:47 crc kubenswrapper[4765]: E1210 07:37:47.589920 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:38:01 crc kubenswrapper[4765]: I1210 07:38:01.588641 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:38:01 crc kubenswrapper[4765]: E1210 07:38:01.589403 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:38:12 crc kubenswrapper[4765]: I1210 07:38:12.589216 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:38:12 crc kubenswrapper[4765]: E1210 07:38:12.591101 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:38:23 crc kubenswrapper[4765]: I1210 07:38:23.589559 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:38:23 crc kubenswrapper[4765]: E1210 07:38:23.592053 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.438147 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4x4rr"]
Dec 10 07:38:28 crc kubenswrapper[4765]: E1210 07:38:28.439044 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="163bd9d4-8584-4a44-9433-76987d748d3f" containerName="registry-server"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.439062 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="163bd9d4-8584-4a44-9433-76987d748d3f" containerName="registry-server"
Dec 10 07:38:28 crc kubenswrapper[4765]: E1210 07:38:28.439135 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="163bd9d4-8584-4a44-9433-76987d748d3f" containerName="extract-content"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.439146 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="163bd9d4-8584-4a44-9433-76987d748d3f" containerName="extract-content"
Dec 10 07:38:28 crc kubenswrapper[4765]: E1210 07:38:28.439159 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="163bd9d4-8584-4a44-9433-76987d748d3f" containerName="extract-utilities"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.439167 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="163bd9d4-8584-4a44-9433-76987d748d3f" containerName="extract-utilities"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.439355 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="163bd9d4-8584-4a44-9433-76987d748d3f" containerName="registry-server"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.441198 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.449611 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4x4rr"]
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.594765 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-catalog-content\") pod \"redhat-marketplace-4x4rr\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") " pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.594808 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k54lg\" (UniqueName: \"kubernetes.io/projected/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-kube-api-access-k54lg\") pod \"redhat-marketplace-4x4rr\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") " pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.594832 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-utilities\") pod \"redhat-marketplace-4x4rr\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") " pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.696775 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-catalog-content\") pod \"redhat-marketplace-4x4rr\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") " pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.697160 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k54lg\" (UniqueName: \"kubernetes.io/projected/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-kube-api-access-k54lg\") pod \"redhat-marketplace-4x4rr\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") " pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.697197 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-utilities\") pod \"redhat-marketplace-4x4rr\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") " pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.697521 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-catalog-content\") pod \"redhat-marketplace-4x4rr\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") " pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.697753 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-utilities\") pod \"redhat-marketplace-4x4rr\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") " pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.721323 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k54lg\" (UniqueName: \"kubernetes.io/projected/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-kube-api-access-k54lg\") pod \"redhat-marketplace-4x4rr\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") " pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:28 crc kubenswrapper[4765]: I1210 07:38:28.764484 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:29 crc kubenswrapper[4765]: I1210 07:38:29.275696 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4x4rr"]
Dec 10 07:38:29 crc kubenswrapper[4765]: I1210 07:38:29.538899 4765 generic.go:334] "Generic (PLEG): container finished" podID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerID="b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47" exitCode=0
Dec 10 07:38:29 crc kubenswrapper[4765]: I1210 07:38:29.539014 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4x4rr" event={"ID":"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d","Type":"ContainerDied","Data":"b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47"}
Dec 10 07:38:29 crc kubenswrapper[4765]: I1210 07:38:29.539414 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4x4rr" event={"ID":"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d","Type":"ContainerStarted","Data":"1babde8e9ae950e5eb8431fbe98f2a8aba60022c0d07c0f1451824b1661c8982"}
Dec 10 07:38:29 crc kubenswrapper[4765]: I1210 07:38:29.541301 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 10 07:38:30 crc kubenswrapper[4765]: I1210 07:38:30.549736 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4x4rr" event={"ID":"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d","Type":"ContainerStarted","Data":"a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c"}
Dec 10 07:38:31 crc kubenswrapper[4765]: I1210 07:38:31.559514 4765 generic.go:334] "Generic (PLEG): container finished" podID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerID="a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c" exitCode=0
Dec 10 07:38:31 crc kubenswrapper[4765]: I1210 07:38:31.559611 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4x4rr" event={"ID":"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d","Type":"ContainerDied","Data":"a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c"}
Dec 10 07:38:32 crc kubenswrapper[4765]: I1210 07:38:32.568909 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4x4rr" event={"ID":"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d","Type":"ContainerStarted","Data":"e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de"}
Dec 10 07:38:32 crc kubenswrapper[4765]: I1210 07:38:32.593113 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4x4rr" podStartSLOduration=1.8333827089999999 podStartE2EDuration="4.593096646s" podCreationTimestamp="2025-12-10 07:38:28 +0000 UTC" firstStartedPulling="2025-12-10 07:38:29.541033286 +0000 UTC m=+3029.267698602" lastFinishedPulling="2025-12-10 07:38:32.300747223 +0000 UTC m=+3032.027412539" observedRunningTime="2025-12-10 07:38:32.586226491 +0000 UTC m=+3032.312891817" watchObservedRunningTime="2025-12-10 07:38:32.593096646 +0000 UTC m=+3032.319761962"
Dec 10 07:38:38 crc kubenswrapper[4765]: I1210 07:38:38.589237 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:38:38 crc kubenswrapper[4765]: E1210 07:38:38.589844 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:38:38 crc kubenswrapper[4765]: I1210 07:38:38.765193 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:38 crc kubenswrapper[4765]: I1210 07:38:38.765570 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:38 crc kubenswrapper[4765]: I1210 07:38:38.806450 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:39 crc kubenswrapper[4765]: I1210 07:38:39.661399 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:40 crc kubenswrapper[4765]: I1210 07:38:40.037254 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4x4rr"]
Dec 10 07:38:41 crc kubenswrapper[4765]: I1210 07:38:41.632276 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4x4rr" podUID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerName="registry-server" containerID="cri-o://e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de" gracePeriod=2
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.502160 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.592133 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k54lg\" (UniqueName: \"kubernetes.io/projected/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-kube-api-access-k54lg\") pod \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") "
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.592218 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-utilities\") pod \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") "
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.592364 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-catalog-content\") pod \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\" (UID: \"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d\") "
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.594131 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-utilities" (OuterVolumeSpecName: "utilities") pod "a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" (UID: "a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.598378 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-kube-api-access-k54lg" (OuterVolumeSpecName: "kube-api-access-k54lg") pod "a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" (UID: "a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d"). InnerVolumeSpecName "kube-api-access-k54lg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.636225 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" (UID: "a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.646223 4765 generic.go:334] "Generic (PLEG): container finished" podID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerID="e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de" exitCode=0
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.646306 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4x4rr"
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.646294 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4x4rr" event={"ID":"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d","Type":"ContainerDied","Data":"e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de"}
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.646802 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4x4rr" event={"ID":"a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d","Type":"ContainerDied","Data":"1babde8e9ae950e5eb8431fbe98f2a8aba60022c0d07c0f1451824b1661c8982"}
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.646889 4765 scope.go:117] "RemoveContainer" containerID="e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de"
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.667065 4765 scope.go:117] "RemoveContainer" containerID="a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c"
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.687409 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4x4rr"]
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.694205 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4x4rr"]
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.695020 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.695044 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k54lg\" (UniqueName: \"kubernetes.io/projected/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-kube-api-access-k54lg\") on node \"crc\" DevicePath \"\""
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.695058 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.698347 4765 scope.go:117] "RemoveContainer" containerID="b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47"
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.717748 4765 scope.go:117] "RemoveContainer" containerID="e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de"
Dec 10 07:38:42 crc kubenswrapper[4765]: E1210 07:38:42.718283 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de\": container with ID starting with e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de not found: ID does not exist" containerID="e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de"
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.718322 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de"} err="failed to get container status \"e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de\": rpc error: code = NotFound desc = could not find container \"e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de\": container with ID starting with e274a818233bd03aeb2fd956cc33fe7f256b53bf304bdfcf3053a5793aa734de not found: ID does not exist"
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.718347 4765 scope.go:117] "RemoveContainer" containerID="a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c"
Dec 10 07:38:42 crc kubenswrapper[4765]: E1210 07:38:42.718777 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c\": container with ID starting with a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c not found: ID does not exist" containerID="a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c"
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.718806 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c"} err="failed to get container status \"a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c\": rpc error: code = NotFound desc = could not find container \"a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c\": container with ID starting with a23a5d6ffb2bb7fb195e14e913b5439ecb77905761e874624fcbd2d95eb9924c not found: ID does not exist"
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.718822 4765 scope.go:117] "RemoveContainer" containerID="b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47"
Dec 10 07:38:42 crc kubenswrapper[4765]: E1210 07:38:42.719416 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47\": container with ID starting with b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47 not found: ID does not exist" containerID="b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47"
Dec 10 07:38:42 crc kubenswrapper[4765]: I1210 07:38:42.719448 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47"} err="failed to get container status \"b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47\": rpc error: code = NotFound desc = could not find container \"b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47\": container with ID starting with b16ec069f1cde934cab07e5619851fbe424199bee078cc7a85df0f10b2c57a47 not found: ID does not exist"
Dec 10 07:38:44 crc kubenswrapper[4765]: I1210 07:38:44.598430 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" path="/var/lib/kubelet/pods/a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d/volumes"
Dec 10 07:38:51 crc kubenswrapper[4765]: I1210 07:38:51.589642 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:38:51 crc kubenswrapper[4765]: E1210 07:38:51.590318 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:39:05 crc kubenswrapper[4765]: I1210 07:39:05.589304 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:39:05 crc kubenswrapper[4765]: E1210 07:39:05.590242 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:39:16 crc kubenswrapper[4765]: I1210 07:39:16.590045 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:39:16 crc kubenswrapper[4765]: E1210 07:39:16.591338 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:39:29 crc kubenswrapper[4765]: I1210 07:39:29.589117 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:39:29 crc kubenswrapper[4765]: E1210 07:39:29.589798 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:39:40 crc kubenswrapper[4765]: I1210 07:39:40.596719 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:39:40 crc kubenswrapper[4765]: E1210 07:39:40.597526 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:39:53 crc kubenswrapper[4765]: I1210 07:39:53.589544 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:39:53 crc kubenswrapper[4765]: E1210 07:39:53.591727 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:40:07 crc kubenswrapper[4765]: I1210 07:40:07.588722 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:40:07 crc kubenswrapper[4765]: E1210 07:40:07.589363 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:40:22 crc kubenswrapper[4765]: I1210 07:40:22.591184 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:40:22 crc kubenswrapper[4765]: E1210 07:40:22.593205 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:40:35 crc kubenswrapper[4765]: I1210 07:40:35.589401 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:40:35 crc kubenswrapper[4765]: E1210 07:40:35.590147 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:40:46 crc kubenswrapper[4765]: I1210 07:40:46.589815 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:40:46 crc kubenswrapper[4765]: E1210 07:40:46.593451 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:40:58 crc kubenswrapper[4765]: I1210 07:40:58.589000 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:40:58 crc kubenswrapper[4765]: E1210 07:40:58.589823 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:41:12 crc kubenswrapper[4765]: I1210 07:41:12.590037 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:41:12 crc kubenswrapper[4765]: E1210 07:41:12.591139 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:41:27 crc kubenswrapper[4765]: I1210 07:41:27.589297 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:41:27 crc kubenswrapper[4765]: E1210 07:41:27.590073 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:41:38 crc kubenswrapper[4765]: I1210 07:41:38.588869 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:41:38 crc kubenswrapper[4765]: E1210 07:41:38.589865 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:41:50 crc kubenswrapper[4765]: I1210 07:41:50.593470 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:41:50 crc kubenswrapper[4765]: E1210 07:41:50.594560 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:42:03 crc kubenswrapper[4765]: I1210 07:42:03.589399 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:42:03 crc kubenswrapper[4765]: E1210 07:42:03.590391 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:42:17 crc kubenswrapper[4765]: I1210 07:42:17.589561 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:42:17 crc kubenswrapper[4765]: E1210 07:42:17.590308 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:42:30 crc kubenswrapper[4765]: I1210 07:42:30.594064 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:42:30 crc kubenswrapper[4765]: E1210 07:42:30.594749 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234"
Dec 10 07:42:44 crc kubenswrapper[4765]: I1210 07:42:44.589685 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97"
Dec 10 07:42:45 crc kubenswrapper[4765]: I1210 07:42:45.746506 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"96e0732dc61f42f67cdd9eb5e4cd3d16738757319ed34de6ac508062ca319715"}
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.141028 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"]
Dec 10 07:45:00 crc kubenswrapper[4765]: E1210 07:45:00.142051 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerName="extract-utilities"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.142068 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerName="extract-utilities"
Dec 10 07:45:00 crc kubenswrapper[4765]: E1210 07:45:00.142140 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerName="registry-server"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.142151 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerName="registry-server"
Dec 10 07:45:00 crc kubenswrapper[4765]: E1210 07:45:00.142160 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerName="extract-content"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.142170 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerName="extract-content"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.142384 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9c80917-6fe3-4e7f-9b9b-69eea43a1f9d" containerName="registry-server"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.143131 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.146666 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.146910 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.167130 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"]
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.265278 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x85jl\" (UniqueName: \"kubernetes.io/projected/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-kube-api-access-x85jl\") pod \"collect-profiles-29422545-2q7vw\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.265386 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-secret-volume\") pod \"collect-profiles-29422545-2q7vw\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.265429 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-config-volume\") pod \"collect-profiles-29422545-2q7vw\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.366363 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x85jl\" (UniqueName: \"kubernetes.io/projected/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-kube-api-access-x85jl\") pod \"collect-profiles-29422545-2q7vw\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.366464 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-secret-volume\") pod \"collect-profiles-29422545-2q7vw\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.366508 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-config-volume\") pod \"collect-profiles-29422545-2q7vw\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.367586 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-config-volume\") pod \"collect-profiles-29422545-2q7vw\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.375709 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-secret-volume\") pod \"collect-profiles-29422545-2q7vw\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.388515 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x85jl\" (UniqueName: \"kubernetes.io/projected/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-kube-api-access-x85jl\") pod \"collect-profiles-29422545-2q7vw\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.468130 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:00 crc kubenswrapper[4765]: I1210 07:45:00.880876 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"]
Dec 10 07:45:01 crc kubenswrapper[4765]: I1210 07:45:01.760965 4765 generic.go:334] "Generic (PLEG): container finished" podID="672b6986-0fb6-41a2-aa54-0b2f36d1bd77" containerID="9d17d82689f2e4fcce890d09063bac7f8dd1d2f6ea5b46fa561fd41e53cd6fdd" exitCode=0
Dec 10 07:45:01 crc kubenswrapper[4765]: I1210 07:45:01.761072 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw" event={"ID":"672b6986-0fb6-41a2-aa54-0b2f36d1bd77","Type":"ContainerDied","Data":"9d17d82689f2e4fcce890d09063bac7f8dd1d2f6ea5b46fa561fd41e53cd6fdd"}
Dec 10 07:45:01 crc kubenswrapper[4765]: I1210 07:45:01.761332 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw" event={"ID":"672b6986-0fb6-41a2-aa54-0b2f36d1bd77","Type":"ContainerStarted","Data":"b7e35ed2759924e3ce96251aaf33000f34798bddb5ae015f92598305cb57a480"}
Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.048494 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw"
Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.113078 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-config-volume\") pod \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") "
Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.113202 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x85jl\" (UniqueName: \"kubernetes.io/projected/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-kube-api-access-x85jl\") pod \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") "
Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.113238 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-secret-volume\") pod \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\" (UID: \"672b6986-0fb6-41a2-aa54-0b2f36d1bd77\") "
Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.113954 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-config-volume" (OuterVolumeSpecName: "config-volume") pod "672b6986-0fb6-41a2-aa54-0b2f36d1bd77" (UID: "672b6986-0fb6-41a2-aa54-0b2f36d1bd77"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.120652 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "672b6986-0fb6-41a2-aa54-0b2f36d1bd77" (UID: "672b6986-0fb6-41a2-aa54-0b2f36d1bd77"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.120730 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-kube-api-access-x85jl" (OuterVolumeSpecName: "kube-api-access-x85jl") pod "672b6986-0fb6-41a2-aa54-0b2f36d1bd77" (UID: "672b6986-0fb6-41a2-aa54-0b2f36d1bd77"). InnerVolumeSpecName "kube-api-access-x85jl".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.215390 4765 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.215769 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x85jl\" (UniqueName: \"kubernetes.io/projected/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-kube-api-access-x85jl\") on node \"crc\" DevicePath \"\"" Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.215832 4765 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/672b6986-0fb6-41a2-aa54-0b2f36d1bd77-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.778134 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw" event={"ID":"672b6986-0fb6-41a2-aa54-0b2f36d1bd77","Type":"ContainerDied","Data":"b7e35ed2759924e3ce96251aaf33000f34798bddb5ae015f92598305cb57a480"} Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.778228 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422545-2q7vw" Dec 10 07:45:03 crc kubenswrapper[4765]: I1210 07:45:03.778240 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7e35ed2759924e3ce96251aaf33000f34798bddb5ae015f92598305cb57a480" Dec 10 07:45:04 crc kubenswrapper[4765]: I1210 07:45:04.049935 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:45:04 crc kubenswrapper[4765]: I1210 07:45:04.050068 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:45:04 crc kubenswrapper[4765]: I1210 07:45:04.129422 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"] Dec 10 07:45:04 crc kubenswrapper[4765]: I1210 07:45:04.136052 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422500-hwqzz"] Dec 10 07:45:04 crc kubenswrapper[4765]: I1210 07:45:04.598667 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2666b91c-c78a-4c38-92d7-a4322fc55bd8" path="/var/lib/kubelet/pods/2666b91c-c78a-4c38-92d7-a4322fc55bd8/volumes" Dec 10 07:45:29 crc kubenswrapper[4765]: I1210 07:45:29.858725 4765 scope.go:117] "RemoveContainer" containerID="f6bc5c702b03233a5ae15a29205a4114bba8a5bace8f951f5465fc6ae975e03d" Dec 10 07:45:34 crc kubenswrapper[4765]: I1210 07:45:34.049711 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 10 07:45:34 crc kubenswrapper[4765]: I1210 07:45:34.050306 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:46:04 crc kubenswrapper[4765]: I1210 07:46:04.049989 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:46:04 crc kubenswrapper[4765]: I1210 07:46:04.050598 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:46:04 crc kubenswrapper[4765]: I1210 07:46:04.050662 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 07:46:04 crc kubenswrapper[4765]: I1210 07:46:04.051446 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"96e0732dc61f42f67cdd9eb5e4cd3d16738757319ed34de6ac508062ca319715"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 07:46:04 crc kubenswrapper[4765]: I1210 07:46:04.051515 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://96e0732dc61f42f67cdd9eb5e4cd3d16738757319ed34de6ac508062ca319715" gracePeriod=600 Dec 10 07:46:04 crc kubenswrapper[4765]: I1210 07:46:04.272872 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="96e0732dc61f42f67cdd9eb5e4cd3d16738757319ed34de6ac508062ca319715" exitCode=0 Dec 10 07:46:04 crc kubenswrapper[4765]: I1210 07:46:04.272919 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"96e0732dc61f42f67cdd9eb5e4cd3d16738757319ed34de6ac508062ca319715"} Dec 10 07:46:04 crc kubenswrapper[4765]: I1210 07:46:04.273232 4765 scope.go:117] "RemoveContainer" containerID="f7807186e1a1ec3c7b37b4b2ec02d97e29f00b47b18251393d86d6cdc7cf2d97" Dec 10 07:46:05 crc kubenswrapper[4765]: I1210 07:46:05.283331 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271"} Dec 10 07:46:09 crc kubenswrapper[4765]: I1210 07:46:09.754786 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ldrpq"] Dec 10 07:46:09 crc kubenswrapper[4765]: E1210 07:46:09.755723 4765 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="672b6986-0fb6-41a2-aa54-0b2f36d1bd77" containerName="collect-profiles" Dec 10 07:46:09 crc kubenswrapper[4765]: I1210 07:46:09.755739 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="672b6986-0fb6-41a2-aa54-0b2f36d1bd77" containerName="collect-profiles" Dec 10 07:46:09 crc kubenswrapper[4765]: I1210 07:46:09.755941 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="672b6986-0fb6-41a2-aa54-0b2f36d1bd77" containerName="collect-profiles" Dec 10 07:46:09 crc kubenswrapper[4765]: I1210 07:46:09.757225 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:09 crc kubenswrapper[4765]: I1210 07:46:09.766659 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ldrpq"] Dec 10 07:46:09 crc kubenswrapper[4765]: I1210 07:46:09.910934 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-utilities\") pod \"community-operators-ldrpq\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:09 crc kubenswrapper[4765]: I1210 07:46:09.911367 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sprw\" (UniqueName: \"kubernetes.io/projected/572932b3-afe2-4441-9bd6-500e37e653de-kube-api-access-9sprw\") pod \"community-operators-ldrpq\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:09 crc kubenswrapper[4765]: I1210 07:46:09.911440 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-catalog-content\") pod \"community-operators-ldrpq\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:10 crc kubenswrapper[4765]: I1210 07:46:10.013660 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-catalog-content\") pod \"community-operators-ldrpq\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:10 crc kubenswrapper[4765]: I1210 07:46:10.013859 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-utilities\") pod \"community-operators-ldrpq\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:10 crc kubenswrapper[4765]: I1210 07:46:10.013890 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sprw\" (UniqueName: \"kubernetes.io/projected/572932b3-afe2-4441-9bd6-500e37e653de-kube-api-access-9sprw\") pod \"community-operators-ldrpq\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:10 crc kubenswrapper[4765]: I1210 07:46:10.014248 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-catalog-content\") pod \"community-operators-ldrpq\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:10 crc kubenswrapper[4765]: I1210 07:46:10.014319 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-utilities\") pod \"community-operators-ldrpq\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:10 crc kubenswrapper[4765]: I1210 07:46:10.034928 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sprw\" (UniqueName: \"kubernetes.io/projected/572932b3-afe2-4441-9bd6-500e37e653de-kube-api-access-9sprw\") pod \"community-operators-ldrpq\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:10 crc kubenswrapper[4765]: I1210 07:46:10.079042 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:10 crc kubenswrapper[4765]: I1210 07:46:10.618199 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ldrpq"] Dec 10 07:46:11 crc kubenswrapper[4765]: I1210 07:46:11.327989 4765 generic.go:334] "Generic (PLEG): container finished" podID="572932b3-afe2-4441-9bd6-500e37e653de" containerID="01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f" exitCode=0 Dec 10 07:46:11 crc kubenswrapper[4765]: I1210 07:46:11.328047 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldrpq" event={"ID":"572932b3-afe2-4441-9bd6-500e37e653de","Type":"ContainerDied","Data":"01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f"} Dec 10 07:46:11 crc kubenswrapper[4765]: I1210 07:46:11.328114 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldrpq" event={"ID":"572932b3-afe2-4441-9bd6-500e37e653de","Type":"ContainerStarted","Data":"aa0cb95609565775a7045562b9d06e6abcfbca2b2a57039eb62c77a0e9b41797"} Dec 10 07:46:11 crc kubenswrapper[4765]: I1210 07:46:11.330623 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 07:46:12 crc kubenswrapper[4765]: I1210 07:46:12.357046 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldrpq" event={"ID":"572932b3-afe2-4441-9bd6-500e37e653de","Type":"ContainerStarted","Data":"90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504"} Dec 10 07:46:13 crc kubenswrapper[4765]: I1210 07:46:13.366458 4765 generic.go:334] "Generic (PLEG): container finished" podID="572932b3-afe2-4441-9bd6-500e37e653de" containerID="90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504" exitCode=0 Dec 10 07:46:13 crc kubenswrapper[4765]: I1210 07:46:13.366568 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldrpq" event={"ID":"572932b3-afe2-4441-9bd6-500e37e653de","Type":"ContainerDied","Data":"90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504"} Dec 10 07:46:14 crc kubenswrapper[4765]: I1210 07:46:14.376979 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldrpq" 
event={"ID":"572932b3-afe2-4441-9bd6-500e37e653de","Type":"ContainerStarted","Data":"b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1"} Dec 10 07:46:14 crc kubenswrapper[4765]: I1210 07:46:14.395814 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ldrpq" podStartSLOduration=2.9529910839999998 podStartE2EDuration="5.395776705s" podCreationTimestamp="2025-12-10 07:46:09 +0000 UTC" firstStartedPulling="2025-12-10 07:46:11.330360754 +0000 UTC m=+3491.057026070" lastFinishedPulling="2025-12-10 07:46:13.773146365 +0000 UTC m=+3493.499811691" observedRunningTime="2025-12-10 07:46:14.392496761 +0000 UTC m=+3494.119162077" watchObservedRunningTime="2025-12-10 07:46:14.395776705 +0000 UTC m=+3494.122442021" Dec 10 07:46:20 crc kubenswrapper[4765]: I1210 07:46:20.079636 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:20 crc kubenswrapper[4765]: I1210 07:46:20.080103 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:20 crc kubenswrapper[4765]: I1210 07:46:20.122829 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:20 crc kubenswrapper[4765]: I1210 07:46:20.457803 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:20 crc kubenswrapper[4765]: I1210 07:46:20.506007 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ldrpq"] Dec 10 07:46:22 crc kubenswrapper[4765]: I1210 07:46:22.430430 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ldrpq" podUID="572932b3-afe2-4441-9bd6-500e37e653de" containerName="registry-server" containerID="cri-o://b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1" gracePeriod=2 Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.307693 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.408698 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sprw\" (UniqueName: \"kubernetes.io/projected/572932b3-afe2-4441-9bd6-500e37e653de-kube-api-access-9sprw\") pod \"572932b3-afe2-4441-9bd6-500e37e653de\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.408906 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-catalog-content\") pod \"572932b3-afe2-4441-9bd6-500e37e653de\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.408955 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-utilities\") pod \"572932b3-afe2-4441-9bd6-500e37e653de\" (UID: \"572932b3-afe2-4441-9bd6-500e37e653de\") " Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.409826 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-utilities" (OuterVolumeSpecName: "utilities") pod "572932b3-afe2-4441-9bd6-500e37e653de" (UID: "572932b3-afe2-4441-9bd6-500e37e653de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.418537 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/572932b3-afe2-4441-9bd6-500e37e653de-kube-api-access-9sprw" (OuterVolumeSpecName: "kube-api-access-9sprw") pod "572932b3-afe2-4441-9bd6-500e37e653de" (UID: "572932b3-afe2-4441-9bd6-500e37e653de"). InnerVolumeSpecName "kube-api-access-9sprw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.440371 4765 generic.go:334] "Generic (PLEG): container finished" podID="572932b3-afe2-4441-9bd6-500e37e653de" containerID="b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1" exitCode=0 Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.440424 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldrpq" event={"ID":"572932b3-afe2-4441-9bd6-500e37e653de","Type":"ContainerDied","Data":"b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1"} Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.440464 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ldrpq" event={"ID":"572932b3-afe2-4441-9bd6-500e37e653de","Type":"ContainerDied","Data":"aa0cb95609565775a7045562b9d06e6abcfbca2b2a57039eb62c77a0e9b41797"} Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.440492 4765 scope.go:117] "RemoveContainer" containerID="b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.440546 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ldrpq" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.458677 4765 scope.go:117] "RemoveContainer" containerID="90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.477179 4765 scope.go:117] "RemoveContainer" containerID="01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.482423 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "572932b3-afe2-4441-9bd6-500e37e653de" (UID: "572932b3-afe2-4441-9bd6-500e37e653de"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.501935 4765 scope.go:117] "RemoveContainer" containerID="b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1" Dec 10 07:46:23 crc kubenswrapper[4765]: E1210 07:46:23.502267 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1\": container with ID starting with b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1 not found: ID does not exist" containerID="b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.502316 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1"} err="failed to get container status \"b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1\": rpc error: code = NotFound desc = could not find container \"b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1\": container with ID starting with b4d7da8a234c9ca9d56bd7a5e23f0983017b623be7c2fd4f75e226cd0c3e4be1 not found: ID does not exist" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.502351 4765 scope.go:117] "RemoveContainer" containerID="90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504" Dec 10 07:46:23 crc kubenswrapper[4765]: E1210 07:46:23.502628 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504\": container with ID starting with 90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504 not found: ID does not exist" containerID="90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.502663 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504"} err="failed to get container status \"90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504\": rpc error: code = NotFound desc = could not find container \"90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504\": container with ID starting with 90956612857d6d6d24a7f6d438ce69481628a6716f6af36285a2fb5eadaab504 not found: ID does not exist" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.502738 4765 scope.go:117] "RemoveContainer" containerID="01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f" Dec 10 07:46:23 crc 
kubenswrapper[4765]: E1210 07:46:23.502977 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f\": container with ID starting with 01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f not found: ID does not exist" containerID="01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.503004 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f"} err="failed to get container status \"01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f\": rpc error: code = NotFound desc = could not find container \"01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f\": container with ID starting with 01ae2f13a5fadaa647e5620d948fc550b03831b7596343c2cfdb3f659716eb7f not found: ID does not exist" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.511201 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sprw\" (UniqueName: \"kubernetes.io/projected/572932b3-afe2-4441-9bd6-500e37e653de-kube-api-access-9sprw\") on node \"crc\" DevicePath \"\"" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.511254 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.511273 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/572932b3-afe2-4441-9bd6-500e37e653de-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.772782 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ldrpq"] Dec 10 07:46:23 crc kubenswrapper[4765]: I1210 07:46:23.779649 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ldrpq"] Dec 10 07:46:24 crc kubenswrapper[4765]: I1210 07:46:24.597152 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="572932b3-afe2-4441-9bd6-500e37e653de" path="/var/lib/kubelet/pods/572932b3-afe2-4441-9bd6-500e37e653de/volumes" Dec 10 07:48:04 crc kubenswrapper[4765]: I1210 07:48:04.049932 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:48:04 crc kubenswrapper[4765]: I1210 07:48:04.050519 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.049427 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.050009 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.729008 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wthtc"]
Dec 10 07:48:34 crc kubenswrapper[4765]: E1210 07:48:34.729428 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="572932b3-afe2-4441-9bd6-500e37e653de" containerName="registry-server"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.729448 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="572932b3-afe2-4441-9bd6-500e37e653de" containerName="registry-server"
Dec 10 07:48:34 crc kubenswrapper[4765]: E1210 07:48:34.729469 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="572932b3-afe2-4441-9bd6-500e37e653de" containerName="extract-content"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.729477 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="572932b3-afe2-4441-9bd6-500e37e653de" containerName="extract-content"
Dec 10 07:48:34 crc kubenswrapper[4765]: E1210 07:48:34.729500 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="572932b3-afe2-4441-9bd6-500e37e653de" containerName="extract-utilities"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.729510 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="572932b3-afe2-4441-9bd6-500e37e653de" containerName="extract-utilities"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.729725 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="572932b3-afe2-4441-9bd6-500e37e653de" containerName="registry-server"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.748568 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.758309 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wthtc"]
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.848816 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmkqd\" (UniqueName: \"kubernetes.io/projected/a8e338e3-b03d-40ed-aad4-6ef1305243b1-kube-api-access-fmkqd\") pod \"redhat-marketplace-wthtc\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") " pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.848883 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-catalog-content\") pod \"redhat-marketplace-wthtc\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") " pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.848924 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-utilities\") pod \"redhat-marketplace-wthtc\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") " pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.950978 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-catalog-content\") pod \"redhat-marketplace-wthtc\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") " pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.951044 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-utilities\") pod \"redhat-marketplace-wthtc\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") " pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.951141 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmkqd\" (UniqueName: \"kubernetes.io/projected/a8e338e3-b03d-40ed-aad4-6ef1305243b1-kube-api-access-fmkqd\") pod \"redhat-marketplace-wthtc\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") " pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.951834 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-catalog-content\") pod \"redhat-marketplace-wthtc\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") " pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.952295 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-utilities\") pod \"redhat-marketplace-wthtc\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") " pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:34 crc kubenswrapper[4765]: I1210 07:48:34.975504 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmkqd\" (UniqueName: \"kubernetes.io/projected/a8e338e3-b03d-40ed-aad4-6ef1305243b1-kube-api-access-fmkqd\") pod \"redhat-marketplace-wthtc\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") " pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:35 crc kubenswrapper[4765]: I1210 07:48:35.106661 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:35 crc kubenswrapper[4765]: I1210 07:48:35.524013 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wthtc"]
Dec 10 07:48:36 crc kubenswrapper[4765]: I1210 07:48:36.429030 4765 generic.go:334] "Generic (PLEG): container finished" podID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerID="7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d" exitCode=0
Dec 10 07:48:36 crc kubenswrapper[4765]: I1210 07:48:36.429111 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wthtc" event={"ID":"a8e338e3-b03d-40ed-aad4-6ef1305243b1","Type":"ContainerDied","Data":"7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d"}
Dec 10 07:48:36 crc kubenswrapper[4765]: I1210 07:48:36.429517 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wthtc" event={"ID":"a8e338e3-b03d-40ed-aad4-6ef1305243b1","Type":"ContainerStarted","Data":"185a661a86ee18f4bc9ac06fd440a5b0377607dc5ed0e42495fd8ad87d0ea2f1"}
Dec 10 07:48:37 crc kubenswrapper[4765]: I1210 07:48:37.437720 4765 generic.go:334] "Generic (PLEG): container finished" podID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerID="6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657" exitCode=0
Dec 10 07:48:37 crc kubenswrapper[4765]: I1210 07:48:37.437781 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wthtc" event={"ID":"a8e338e3-b03d-40ed-aad4-6ef1305243b1","Type":"ContainerDied","Data":"6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657"}
Dec 10 07:48:38 crc kubenswrapper[4765]: I1210 07:48:38.448631 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wthtc" event={"ID":"a8e338e3-b03d-40ed-aad4-6ef1305243b1","Type":"ContainerStarted","Data":"ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799"}
Dec 10 07:48:38 crc kubenswrapper[4765]: I1210 07:48:38.467238 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wthtc" podStartSLOduration=3.006834659 podStartE2EDuration="4.467217218s" podCreationTimestamp="2025-12-10 07:48:34 +0000 UTC" firstStartedPulling="2025-12-10 07:48:36.431215699 +0000 UTC m=+3636.157881015" lastFinishedPulling="2025-12-10 07:48:37.891598258 +0000 UTC m=+3637.618263574" observedRunningTime="2025-12-10 07:48:38.466219839 +0000 UTC m=+3638.192885155" watchObservedRunningTime="2025-12-10 07:48:38.467217218 +0000 UTC m=+3638.193882544"
Dec 10 07:48:45 crc kubenswrapper[4765]: I1210 07:48:45.107559 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:45 crc kubenswrapper[4765]: I1210 07:48:45.107998 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:45 crc kubenswrapper[4765]: I1210 07:48:45.153771 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:45 crc kubenswrapper[4765]: I1210 07:48:45.553692 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:45 crc kubenswrapper[4765]: I1210 07:48:45.613570 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wthtc"]
Dec 10 07:48:47 crc kubenswrapper[4765]: I1210 07:48:47.526707 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wthtc" podUID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerName="registry-server" containerID="cri-o://ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799" gracePeriod=2
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.495625 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.536065 4765 generic.go:334] "Generic (PLEG): container finished" podID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerID="ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799" exitCode=0
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.536156 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wthtc"
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.536151 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wthtc" event={"ID":"a8e338e3-b03d-40ed-aad4-6ef1305243b1","Type":"ContainerDied","Data":"ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799"}
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.536898 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wthtc" event={"ID":"a8e338e3-b03d-40ed-aad4-6ef1305243b1","Type":"ContainerDied","Data":"185a661a86ee18f4bc9ac06fd440a5b0377607dc5ed0e42495fd8ad87d0ea2f1"}
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.536923 4765 scope.go:117] "RemoveContainer" containerID="ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799"
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.554812 4765 scope.go:117] "RemoveContainer" containerID="6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657"
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.571965 4765 scope.go:117] "RemoveContainer" containerID="7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d"
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.592967 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-catalog-content\") pod \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") "
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.593139 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-utilities\") pod \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") "
Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.593247 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmkqd\" (UniqueName: \"kubernetes.io/projected/a8e338e3-b03d-40ed-aad4-6ef1305243b1-kube-api-access-fmkqd\") pod \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") "
\"kubernetes.io/projected/a8e338e3-b03d-40ed-aad4-6ef1305243b1-kube-api-access-fmkqd\") pod \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\" (UID: \"a8e338e3-b03d-40ed-aad4-6ef1305243b1\") " Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.594655 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-utilities" (OuterVolumeSpecName: "utilities") pod "a8e338e3-b03d-40ed-aad4-6ef1305243b1" (UID: "a8e338e3-b03d-40ed-aad4-6ef1305243b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.597787 4765 scope.go:117] "RemoveContainer" containerID="ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.598980 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8e338e3-b03d-40ed-aad4-6ef1305243b1-kube-api-access-fmkqd" (OuterVolumeSpecName: "kube-api-access-fmkqd") pod "a8e338e3-b03d-40ed-aad4-6ef1305243b1" (UID: "a8e338e3-b03d-40ed-aad4-6ef1305243b1"). InnerVolumeSpecName "kube-api-access-fmkqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:48:48 crc kubenswrapper[4765]: E1210 07:48:48.600348 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799\": container with ID starting with ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799 not found: ID does not exist" containerID="ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.600403 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799"} err="failed to get container status \"ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799\": rpc error: code = NotFound desc = could not find container \"ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799\": container with ID starting with ec43f5d8cec442eb60ab18393eb131c5620c78e51ffd0ea9501ee8bb3c2dd799 not found: ID does not exist" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.600434 4765 scope.go:117] "RemoveContainer" containerID="6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657" Dec 10 07:48:48 crc kubenswrapper[4765]: E1210 07:48:48.600837 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657\": container with ID starting with 6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657 not found: ID does not exist" containerID="6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.600886 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657"} err="failed to get container status \"6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657\": rpc error: code = NotFound desc = could not find container \"6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657\": container with ID starting with 6859d42b18122ba9e625dfa21c6610268b9597e77fe08e88e3110c01c03be657 not found: ID does not exist" Dec 10 
07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.600917 4765 scope.go:117] "RemoveContainer" containerID="7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d" Dec 10 07:48:48 crc kubenswrapper[4765]: E1210 07:48:48.601227 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d\": container with ID starting with 7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d not found: ID does not exist" containerID="7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.601255 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d"} err="failed to get container status \"7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d\": rpc error: code = NotFound desc = could not find container \"7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d\": container with ID starting with 7c3828df778643cc5070495a86dbf39a544b5fd66f9183f11fe0fbc231c54b6d not found: ID does not exist" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.616795 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8e338e3-b03d-40ed-aad4-6ef1305243b1" (UID: "a8e338e3-b03d-40ed-aad4-6ef1305243b1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.694622 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.694660 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmkqd\" (UniqueName: \"kubernetes.io/projected/a8e338e3-b03d-40ed-aad4-6ef1305243b1-kube-api-access-fmkqd\") on node \"crc\" DevicePath \"\"" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.694673 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8e338e3-b03d-40ed-aad4-6ef1305243b1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.867757 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wthtc"] Dec 10 07:48:48 crc kubenswrapper[4765]: I1210 07:48:48.874146 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wthtc"] Dec 10 07:48:50 crc kubenswrapper[4765]: I1210 07:48:50.600527 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" path="/var/lib/kubelet/pods/a8e338e3-b03d-40ed-aad4-6ef1305243b1/volumes" Dec 10 07:49:04 crc kubenswrapper[4765]: I1210 07:49:04.049905 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:49:04 crc kubenswrapper[4765]: I1210 07:49:04.050539 4765 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:49:04 crc kubenswrapper[4765]: I1210 07:49:04.050592 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 07:49:04 crc kubenswrapper[4765]: I1210 07:49:04.051211 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 07:49:04 crc kubenswrapper[4765]: I1210 07:49:04.051265 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" gracePeriod=600 Dec 10 07:49:04 crc kubenswrapper[4765]: E1210 07:49:04.205862 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:49:04 crc kubenswrapper[4765]: I1210 07:49:04.669348 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" exitCode=0 Dec 10 07:49:04 crc kubenswrapper[4765]: I1210 07:49:04.669437 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271"} Dec 10 07:49:04 crc kubenswrapper[4765]: I1210 07:49:04.669526 4765 scope.go:117] "RemoveContainer" containerID="96e0732dc61f42f67cdd9eb5e4cd3d16738757319ed34de6ac508062ca319715" Dec 10 07:49:04 crc kubenswrapper[4765]: I1210 07:49:04.669962 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:49:04 crc kubenswrapper[4765]: E1210 07:49:04.670255 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:49:20 crc kubenswrapper[4765]: I1210 07:49:20.689984 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:49:20 crc kubenswrapper[4765]: E1210 07:49:20.693351 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:49:31 crc kubenswrapper[4765]: I1210 07:49:31.589751 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:49:31 crc kubenswrapper[4765]: E1210 07:49:31.590734 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:49:42 crc kubenswrapper[4765]: I1210 07:49:42.589355 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:49:42 crc kubenswrapper[4765]: E1210 07:49:42.590107 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:49:56 crc kubenswrapper[4765]: I1210 07:49:56.588826 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:49:56 crc kubenswrapper[4765]: E1210 07:49:56.589621 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:50:11 crc kubenswrapper[4765]: I1210 07:50:11.589310 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:50:11 crc kubenswrapper[4765]: E1210 07:50:11.591379 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:50:25 crc kubenswrapper[4765]: I1210 07:50:25.590322 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:50:25 crc kubenswrapper[4765]: E1210 07:50:25.591243 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:50:38 crc kubenswrapper[4765]: I1210 07:50:38.590278 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:50:38 crc kubenswrapper[4765]: E1210 07:50:38.591049 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:50:49 crc kubenswrapper[4765]: I1210 07:50:49.589017 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:50:49 crc kubenswrapper[4765]: E1210 07:50:49.590134 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.352711 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-g8lj2"] Dec 10 07:50:51 crc kubenswrapper[4765]: E1210 07:50:51.353883 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerName="extract-utilities" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.353902 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerName="extract-utilities" Dec 10 07:50:51 crc kubenswrapper[4765]: E1210 07:50:51.353929 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerName="registry-server" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.353937 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerName="registry-server" Dec 10 07:50:51 crc kubenswrapper[4765]: E1210 07:50:51.353959 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerName="extract-content" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.353967 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerName="extract-content" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.354242 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8e338e3-b03d-40ed-aad4-6ef1305243b1" containerName="registry-server" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.355888 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.371055 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g8lj2"] Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.537218 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gzjjd"] Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.540024 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.540662 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4lxt\" (UniqueName: \"kubernetes.io/projected/9e0d3d13-633a-411d-9001-c61d8931680d-kube-api-access-b4lxt\") pod \"redhat-operators-g8lj2\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.541006 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-utilities\") pod \"redhat-operators-g8lj2\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.541277 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-catalog-content\") pod \"redhat-operators-g8lj2\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.563329 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gzjjd"] Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.643373 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kd6vk\" (UniqueName: \"kubernetes.io/projected/0aade9ca-2423-4b08-b110-764470cce8d2-kube-api-access-kd6vk\") pod \"certified-operators-gzjjd\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.643550 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-utilities\") pod \"redhat-operators-g8lj2\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.643786 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-catalog-content\") pod \"redhat-operators-g8lj2\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.643838 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-utilities\") pod \"certified-operators-gzjjd\" (UID: 
\"0aade9ca-2423-4b08-b110-764470cce8d2\") " pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.643863 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-catalog-content\") pod \"certified-operators-gzjjd\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.644011 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4lxt\" (UniqueName: \"kubernetes.io/projected/9e0d3d13-633a-411d-9001-c61d8931680d-kube-api-access-b4lxt\") pod \"redhat-operators-g8lj2\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.644375 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-utilities\") pod \"redhat-operators-g8lj2\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.644449 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-catalog-content\") pod \"redhat-operators-g8lj2\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.671965 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4lxt\" (UniqueName: \"kubernetes.io/projected/9e0d3d13-633a-411d-9001-c61d8931680d-kube-api-access-b4lxt\") pod \"redhat-operators-g8lj2\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.694839 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.751403 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kd6vk\" (UniqueName: \"kubernetes.io/projected/0aade9ca-2423-4b08-b110-764470cce8d2-kube-api-access-kd6vk\") pod \"certified-operators-gzjjd\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.751608 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-utilities\") pod \"certified-operators-gzjjd\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.751640 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-catalog-content\") pod \"certified-operators-gzjjd\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.752397 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-catalog-content\") pod \"certified-operators-gzjjd\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.752822 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-utilities\") pod \"certified-operators-gzjjd\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.774181 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kd6vk\" (UniqueName: \"kubernetes.io/projected/0aade9ca-2423-4b08-b110-764470cce8d2-kube-api-access-kd6vk\") pod \"certified-operators-gzjjd\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:51 crc kubenswrapper[4765]: I1210 07:50:51.858509 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:50:52 crc kubenswrapper[4765]: I1210 07:50:52.288876 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g8lj2"] Dec 10 07:50:52 crc kubenswrapper[4765]: I1210 07:50:52.435751 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gzjjd"] Dec 10 07:50:52 crc kubenswrapper[4765]: W1210 07:50:52.444345 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0aade9ca_2423_4b08_b110_764470cce8d2.slice/crio-611ce26dc132f357586e198a3bf657decb85fa7f9edff939fd9ecc477109b3fa WatchSource:0}: Error finding container 611ce26dc132f357586e198a3bf657decb85fa7f9edff939fd9ecc477109b3fa: Status 404 returned error can't find the container with id 611ce26dc132f357586e198a3bf657decb85fa7f9edff939fd9ecc477109b3fa Dec 10 07:50:52 crc kubenswrapper[4765]: I1210 07:50:52.524943 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8lj2" event={"ID":"9e0d3d13-633a-411d-9001-c61d8931680d","Type":"ContainerStarted","Data":"a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879"} Dec 10 07:50:52 crc kubenswrapper[4765]: I1210 07:50:52.525004 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8lj2" event={"ID":"9e0d3d13-633a-411d-9001-c61d8931680d","Type":"ContainerStarted","Data":"7cd90cc372a6b8ceaed354766392774f5e8ee291b0a6e3576ce6a21aa3bfed57"} Dec 10 07:50:52 crc kubenswrapper[4765]: I1210 07:50:52.526874 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gzjjd" event={"ID":"0aade9ca-2423-4b08-b110-764470cce8d2","Type":"ContainerStarted","Data":"611ce26dc132f357586e198a3bf657decb85fa7f9edff939fd9ecc477109b3fa"} Dec 10 07:50:53 crc kubenswrapper[4765]: I1210 07:50:53.537517 4765 generic.go:334] "Generic (PLEG): container finished" podID="9e0d3d13-633a-411d-9001-c61d8931680d" containerID="a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879" exitCode=0 Dec 10 07:50:53 crc kubenswrapper[4765]: I1210 07:50:53.539190 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8lj2" event={"ID":"9e0d3d13-633a-411d-9001-c61d8931680d","Type":"ContainerDied","Data":"a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879"} Dec 10 07:50:53 crc kubenswrapper[4765]: I1210 07:50:53.542271 4765 generic.go:334] "Generic (PLEG): container finished" podID="0aade9ca-2423-4b08-b110-764470cce8d2" containerID="407c98be7a60c3109b06969506235730693e61cc4a24c26f273ef9549b15f49c" exitCode=0 Dec 10 07:50:53 crc kubenswrapper[4765]: I1210 07:50:53.542319 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gzjjd" event={"ID":"0aade9ca-2423-4b08-b110-764470cce8d2","Type":"ContainerDied","Data":"407c98be7a60c3109b06969506235730693e61cc4a24c26f273ef9549b15f49c"} Dec 10 07:50:54 crc kubenswrapper[4765]: I1210 07:50:54.576486 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gzjjd" event={"ID":"0aade9ca-2423-4b08-b110-764470cce8d2","Type":"ContainerStarted","Data":"d1d256c8dca1499506c33b4984560270201744074fe58d58167cc8efef0f4845"} Dec 10 07:50:55 crc kubenswrapper[4765]: I1210 07:50:55.590196 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-g8lj2" event={"ID":"9e0d3d13-633a-411d-9001-c61d8931680d","Type":"ContainerStarted","Data":"b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f"} Dec 10 07:50:55 crc kubenswrapper[4765]: I1210 07:50:55.592779 4765 generic.go:334] "Generic (PLEG): container finished" podID="0aade9ca-2423-4b08-b110-764470cce8d2" containerID="d1d256c8dca1499506c33b4984560270201744074fe58d58167cc8efef0f4845" exitCode=0 Dec 10 07:50:55 crc kubenswrapper[4765]: I1210 07:50:55.592858 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gzjjd" event={"ID":"0aade9ca-2423-4b08-b110-764470cce8d2","Type":"ContainerDied","Data":"d1d256c8dca1499506c33b4984560270201744074fe58d58167cc8efef0f4845"} Dec 10 07:50:56 crc kubenswrapper[4765]: I1210 07:50:56.603459 4765 generic.go:334] "Generic (PLEG): container finished" podID="9e0d3d13-633a-411d-9001-c61d8931680d" containerID="b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f" exitCode=0 Dec 10 07:50:56 crc kubenswrapper[4765]: I1210 07:50:56.603531 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8lj2" event={"ID":"9e0d3d13-633a-411d-9001-c61d8931680d","Type":"ContainerDied","Data":"b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f"} Dec 10 07:50:57 crc kubenswrapper[4765]: I1210 07:50:57.617506 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8lj2" event={"ID":"9e0d3d13-633a-411d-9001-c61d8931680d","Type":"ContainerStarted","Data":"0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3"} Dec 10 07:50:57 crc kubenswrapper[4765]: I1210 07:50:57.649431 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-g8lj2" podStartSLOduration=3.161624424 podStartE2EDuration="6.649402529s" podCreationTimestamp="2025-12-10 07:50:51 +0000 UTC" firstStartedPulling="2025-12-10 07:50:53.54057411 +0000 UTC m=+3773.267239426" lastFinishedPulling="2025-12-10 07:50:57.028352205 +0000 UTC m=+3776.755017531" observedRunningTime="2025-12-10 07:50:57.639320672 +0000 UTC m=+3777.365985998" watchObservedRunningTime="2025-12-10 07:50:57.649402529 +0000 UTC m=+3777.376067845" Dec 10 07:50:58 crc kubenswrapper[4765]: I1210 07:50:58.627728 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gzjjd" event={"ID":"0aade9ca-2423-4b08-b110-764470cce8d2","Type":"ContainerStarted","Data":"77f52026ce07350c5df65221c1ff5916a7a948516237a4a4c789ddf61731efa7"} Dec 10 07:50:58 crc kubenswrapper[4765]: I1210 07:50:58.663706 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gzjjd" podStartSLOduration=4.099527332 podStartE2EDuration="7.663678063s" podCreationTimestamp="2025-12-10 07:50:51 +0000 UTC" firstStartedPulling="2025-12-10 07:50:53.543925906 +0000 UTC m=+3773.270591222" lastFinishedPulling="2025-12-10 07:50:57.108076637 +0000 UTC m=+3776.834741953" observedRunningTime="2025-12-10 07:50:58.661294625 +0000 UTC m=+3778.387959941" watchObservedRunningTime="2025-12-10 07:50:58.663678063 +0000 UTC m=+3778.390343379" Dec 10 07:51:00 crc kubenswrapper[4765]: I1210 07:51:00.594654 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:51:00 crc kubenswrapper[4765]: E1210 07:51:00.595409 4765 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:51:01 crc kubenswrapper[4765]: I1210 07:51:01.695861 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:51:01 crc kubenswrapper[4765]: I1210 07:51:01.696507 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:51:01 crc kubenswrapper[4765]: I1210 07:51:01.858977 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:51:01 crc kubenswrapper[4765]: I1210 07:51:01.859484 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:51:02 crc kubenswrapper[4765]: I1210 07:51:02.118425 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:51:02 crc kubenswrapper[4765]: I1210 07:51:02.702433 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:51:03 crc kubenswrapper[4765]: I1210 07:51:03.206234 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-g8lj2" podUID="9e0d3d13-633a-411d-9001-c61d8931680d" containerName="registry-server" probeResult="failure" output=< Dec 10 07:51:03 crc kubenswrapper[4765]: timeout: failed to connect service ":50051" within 1s Dec 10 07:51:03 crc kubenswrapper[4765]: > Dec 10 07:51:04 crc kubenswrapper[4765]: I1210 07:51:04.928287 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gzjjd"] Dec 10 07:51:05 crc kubenswrapper[4765]: I1210 07:51:05.689827 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gzjjd" podUID="0aade9ca-2423-4b08-b110-764470cce8d2" containerName="registry-server" containerID="cri-o://77f52026ce07350c5df65221c1ff5916a7a948516237a4a4c789ddf61731efa7" gracePeriod=2 Dec 10 07:51:08 crc kubenswrapper[4765]: I1210 07:51:08.717433 4765 generic.go:334] "Generic (PLEG): container finished" podID="0aade9ca-2423-4b08-b110-764470cce8d2" containerID="77f52026ce07350c5df65221c1ff5916a7a948516237a4a4c789ddf61731efa7" exitCode=0 Dec 10 07:51:08 crc kubenswrapper[4765]: I1210 07:51:08.717496 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gzjjd" event={"ID":"0aade9ca-2423-4b08-b110-764470cce8d2","Type":"ContainerDied","Data":"77f52026ce07350c5df65221c1ff5916a7a948516237a4a4c789ddf61731efa7"} Dec 10 07:51:08 crc kubenswrapper[4765]: I1210 07:51:08.768347 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:51:08 crc kubenswrapper[4765]: I1210 07:51:08.938116 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-catalog-content\") pod \"0aade9ca-2423-4b08-b110-764470cce8d2\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " Dec 10 07:51:08 crc kubenswrapper[4765]: I1210 07:51:08.938314 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kd6vk\" (UniqueName: \"kubernetes.io/projected/0aade9ca-2423-4b08-b110-764470cce8d2-kube-api-access-kd6vk\") pod \"0aade9ca-2423-4b08-b110-764470cce8d2\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " Dec 10 07:51:08 crc kubenswrapper[4765]: I1210 07:51:08.938413 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-utilities\") pod \"0aade9ca-2423-4b08-b110-764470cce8d2\" (UID: \"0aade9ca-2423-4b08-b110-764470cce8d2\") " Dec 10 07:51:08 crc kubenswrapper[4765]: I1210 07:51:08.939784 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-utilities" (OuterVolumeSpecName: "utilities") pod "0aade9ca-2423-4b08-b110-764470cce8d2" (UID: "0aade9ca-2423-4b08-b110-764470cce8d2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:51:08 crc kubenswrapper[4765]: I1210 07:51:08.945978 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aade9ca-2423-4b08-b110-764470cce8d2-kube-api-access-kd6vk" (OuterVolumeSpecName: "kube-api-access-kd6vk") pod "0aade9ca-2423-4b08-b110-764470cce8d2" (UID: "0aade9ca-2423-4b08-b110-764470cce8d2"). InnerVolumeSpecName "kube-api-access-kd6vk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.003067 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0aade9ca-2423-4b08-b110-764470cce8d2" (UID: "0aade9ca-2423-4b08-b110-764470cce8d2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.040164 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.040205 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0aade9ca-2423-4b08-b110-764470cce8d2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.040223 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kd6vk\" (UniqueName: \"kubernetes.io/projected/0aade9ca-2423-4b08-b110-764470cce8d2-kube-api-access-kd6vk\") on node \"crc\" DevicePath \"\"" Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.727879 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gzjjd" event={"ID":"0aade9ca-2423-4b08-b110-764470cce8d2","Type":"ContainerDied","Data":"611ce26dc132f357586e198a3bf657decb85fa7f9edff939fd9ecc477109b3fa"} Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.727951 4765 scope.go:117] "RemoveContainer" containerID="77f52026ce07350c5df65221c1ff5916a7a948516237a4a4c789ddf61731efa7" Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.728041 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gzjjd" Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.750066 4765 scope.go:117] "RemoveContainer" containerID="d1d256c8dca1499506c33b4984560270201744074fe58d58167cc8efef0f4845" Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.762716 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gzjjd"] Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.772218 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gzjjd"] Dec 10 07:51:09 crc kubenswrapper[4765]: I1210 07:51:09.784280 4765 scope.go:117] "RemoveContainer" containerID="407c98be7a60c3109b06969506235730693e61cc4a24c26f273ef9549b15f49c" Dec 10 07:51:10 crc kubenswrapper[4765]: I1210 07:51:10.602508 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aade9ca-2423-4b08-b110-764470cce8d2" path="/var/lib/kubelet/pods/0aade9ca-2423-4b08-b110-764470cce8d2/volumes" Dec 10 07:51:11 crc kubenswrapper[4765]: I1210 07:51:11.736213 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:51:11 crc kubenswrapper[4765]: I1210 07:51:11.787840 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:51:12 crc kubenswrapper[4765]: I1210 07:51:12.002535 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g8lj2"] Dec 10 07:51:13 crc kubenswrapper[4765]: I1210 07:51:13.763076 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-g8lj2" podUID="9e0d3d13-633a-411d-9001-c61d8931680d" containerName="registry-server" containerID="cri-o://0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3" gracePeriod=2 Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.732119 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.775499 4765 generic.go:334] "Generic (PLEG): container finished" podID="9e0d3d13-633a-411d-9001-c61d8931680d" containerID="0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3" exitCode=0 Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.775578 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g8lj2" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.775587 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8lj2" event={"ID":"9e0d3d13-633a-411d-9001-c61d8931680d","Type":"ContainerDied","Data":"0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3"} Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.775623 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g8lj2" event={"ID":"9e0d3d13-633a-411d-9001-c61d8931680d","Type":"ContainerDied","Data":"7cd90cc372a6b8ceaed354766392774f5e8ee291b0a6e3576ce6a21aa3bfed57"} Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.775646 4765 scope.go:117] "RemoveContainer" containerID="0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.796257 4765 scope.go:117] "RemoveContainer" containerID="b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.831343 4765 scope.go:117] "RemoveContainer" containerID="a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.848602 4765 scope.go:117] "RemoveContainer" containerID="0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3" Dec 10 07:51:14 crc kubenswrapper[4765]: E1210 07:51:14.851583 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3\": container with ID starting with 0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3 not found: ID does not exist" containerID="0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.851630 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3"} err="failed to get container status \"0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3\": rpc error: code = NotFound desc = could not find container \"0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3\": container with ID starting with 0e5e6fd0d4b193bb0db5687d613725823059ca18347a5ce59c56a5b7bfbaacd3 not found: ID does not exist" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.851665 4765 scope.go:117] "RemoveContainer" containerID="b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f" Dec 10 07:51:14 crc kubenswrapper[4765]: E1210 07:51:14.852545 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f\": container with ID starting with b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f not found: ID does not exist" 
containerID="b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.852616 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f"} err="failed to get container status \"b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f\": rpc error: code = NotFound desc = could not find container \"b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f\": container with ID starting with b8a2f2030bd5dad733727c3b2017240bcd3fe35fd961ca159a50b95ab3d2b78f not found: ID does not exist" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.852659 4765 scope.go:117] "RemoveContainer" containerID="a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879" Dec 10 07:51:14 crc kubenswrapper[4765]: E1210 07:51:14.853005 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879\": container with ID starting with a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879 not found: ID does not exist" containerID="a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.853045 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879"} err="failed to get container status \"a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879\": rpc error: code = NotFound desc = could not find container \"a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879\": container with ID starting with a3677f062ba4b83f652a11b3bb274ac6b403f48f65d28c217f47197268a5c879 not found: ID does not exist" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.930656 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4lxt\" (UniqueName: \"kubernetes.io/projected/9e0d3d13-633a-411d-9001-c61d8931680d-kube-api-access-b4lxt\") pod \"9e0d3d13-633a-411d-9001-c61d8931680d\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.930827 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-catalog-content\") pod \"9e0d3d13-633a-411d-9001-c61d8931680d\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.930893 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-utilities\") pod \"9e0d3d13-633a-411d-9001-c61d8931680d\" (UID: \"9e0d3d13-633a-411d-9001-c61d8931680d\") " Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.932019 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-utilities" (OuterVolumeSpecName: "utilities") pod "9e0d3d13-633a-411d-9001-c61d8931680d" (UID: "9e0d3d13-633a-411d-9001-c61d8931680d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:51:14 crc kubenswrapper[4765]: I1210 07:51:14.935735 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e0d3d13-633a-411d-9001-c61d8931680d-kube-api-access-b4lxt" (OuterVolumeSpecName: "kube-api-access-b4lxt") pod "9e0d3d13-633a-411d-9001-c61d8931680d" (UID: "9e0d3d13-633a-411d-9001-c61d8931680d"). InnerVolumeSpecName "kube-api-access-b4lxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:51:15 crc kubenswrapper[4765]: I1210 07:51:15.032332 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4lxt\" (UniqueName: \"kubernetes.io/projected/9e0d3d13-633a-411d-9001-c61d8931680d-kube-api-access-b4lxt\") on node \"crc\" DevicePath \"\"" Dec 10 07:51:15 crc kubenswrapper[4765]: I1210 07:51:15.032376 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:51:15 crc kubenswrapper[4765]: I1210 07:51:15.047723 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9e0d3d13-633a-411d-9001-c61d8931680d" (UID: "9e0d3d13-633a-411d-9001-c61d8931680d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:51:15 crc kubenswrapper[4765]: I1210 07:51:15.114250 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g8lj2"] Dec 10 07:51:15 crc kubenswrapper[4765]: I1210 07:51:15.120403 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-g8lj2"] Dec 10 07:51:15 crc kubenswrapper[4765]: I1210 07:51:15.133496 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9e0d3d13-633a-411d-9001-c61d8931680d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:51:15 crc kubenswrapper[4765]: I1210 07:51:15.589771 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:51:15 crc kubenswrapper[4765]: E1210 07:51:15.590161 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:51:16 crc kubenswrapper[4765]: I1210 07:51:16.599037 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e0d3d13-633a-411d-9001-c61d8931680d" path="/var/lib/kubelet/pods/9e0d3d13-633a-411d-9001-c61d8931680d/volumes" Dec 10 07:51:29 crc kubenswrapper[4765]: I1210 07:51:29.589521 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:51:29 crc kubenswrapper[4765]: E1210 07:51:29.590287 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:51:44 crc kubenswrapper[4765]: I1210 07:51:44.589971 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:51:44 crc kubenswrapper[4765]: E1210 07:51:44.590765 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:51:59 crc kubenswrapper[4765]: I1210 07:51:59.589302 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:51:59 crc kubenswrapper[4765]: E1210 07:51:59.590141 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:52:10 crc kubenswrapper[4765]: I1210 07:52:10.593307 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:52:10 crc kubenswrapper[4765]: E1210 07:52:10.595205 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:52:25 crc kubenswrapper[4765]: I1210 07:52:25.589360 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:52:25 crc kubenswrapper[4765]: E1210 07:52:25.590219 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:52:36 crc kubenswrapper[4765]: I1210 07:52:36.589665 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:52:36 crc kubenswrapper[4765]: E1210 07:52:36.590407 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" 
podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:52:51 crc kubenswrapper[4765]: I1210 07:52:51.589831 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:52:51 crc kubenswrapper[4765]: E1210 07:52:51.590605 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:53:02 crc kubenswrapper[4765]: I1210 07:53:02.592942 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:53:02 crc kubenswrapper[4765]: E1210 07:53:02.593936 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:53:13 crc kubenswrapper[4765]: I1210 07:53:13.589067 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:53:13 crc kubenswrapper[4765]: E1210 07:53:13.589936 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:53:25 crc kubenswrapper[4765]: I1210 07:53:25.588949 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:53:25 crc kubenswrapper[4765]: E1210 07:53:25.589922 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:53:36 crc kubenswrapper[4765]: I1210 07:53:36.589660 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:53:36 crc kubenswrapper[4765]: E1210 07:53:36.590518 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:53:51 crc kubenswrapper[4765]: I1210 07:53:51.589682 4765 scope.go:117] "RemoveContainer" 
containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:53:51 crc kubenswrapper[4765]: E1210 07:53:51.591566 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:54:02 crc kubenswrapper[4765]: I1210 07:54:02.589062 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:54:02 crc kubenswrapper[4765]: E1210 07:54:02.591120 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 07:54:15 crc kubenswrapper[4765]: I1210 07:54:15.589709 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:54:16 crc kubenswrapper[4765]: I1210 07:54:16.169398 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"fb69862d770b3a99e6d1672526b9cfd663259d0384b956ca066d13aec349d21b"} Dec 10 07:56:34 crc kubenswrapper[4765]: I1210 07:56:34.050276 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:56:34 crc kubenswrapper[4765]: I1210 07:56:34.051142 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.071502 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hq7lm"] Dec 10 07:56:47 crc kubenswrapper[4765]: E1210 07:56:47.072391 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e0d3d13-633a-411d-9001-c61d8931680d" containerName="registry-server" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.072404 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e0d3d13-633a-411d-9001-c61d8931680d" containerName="registry-server" Dec 10 07:56:47 crc kubenswrapper[4765]: E1210 07:56:47.072428 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aade9ca-2423-4b08-b110-764470cce8d2" containerName="extract-content" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.072434 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aade9ca-2423-4b08-b110-764470cce8d2" containerName="extract-content" Dec 10 07:56:47 crc kubenswrapper[4765]: E1210 07:56:47.072447 4765 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aade9ca-2423-4b08-b110-764470cce8d2" containerName="registry-server" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.072453 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aade9ca-2423-4b08-b110-764470cce8d2" containerName="registry-server" Dec 10 07:56:47 crc kubenswrapper[4765]: E1210 07:56:47.072472 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e0d3d13-633a-411d-9001-c61d8931680d" containerName="extract-utilities" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.072478 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e0d3d13-633a-411d-9001-c61d8931680d" containerName="extract-utilities" Dec 10 07:56:47 crc kubenswrapper[4765]: E1210 07:56:47.072490 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e0d3d13-633a-411d-9001-c61d8931680d" containerName="extract-content" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.072496 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e0d3d13-633a-411d-9001-c61d8931680d" containerName="extract-content" Dec 10 07:56:47 crc kubenswrapper[4765]: E1210 07:56:47.072508 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aade9ca-2423-4b08-b110-764470cce8d2" containerName="extract-utilities" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.072513 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aade9ca-2423-4b08-b110-764470cce8d2" containerName="extract-utilities" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.072703 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aade9ca-2423-4b08-b110-764470cce8d2" containerName="registry-server" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.072726 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e0d3d13-633a-411d-9001-c61d8931680d" containerName="registry-server" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.074546 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.098157 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hq7lm"] Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.235962 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14816080-6b8f-4858-9f45-636bfc8110bf-utilities\") pod \"community-operators-hq7lm\" (UID: \"14816080-6b8f-4858-9f45-636bfc8110bf\") " pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.236019 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhq2t\" (UniqueName: \"kubernetes.io/projected/14816080-6b8f-4858-9f45-636bfc8110bf-kube-api-access-rhq2t\") pod \"community-operators-hq7lm\" (UID: \"14816080-6b8f-4858-9f45-636bfc8110bf\") " pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.236293 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14816080-6b8f-4858-9f45-636bfc8110bf-catalog-content\") pod \"community-operators-hq7lm\" (UID: \"14816080-6b8f-4858-9f45-636bfc8110bf\") " pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.337336 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14816080-6b8f-4858-9f45-636bfc8110bf-utilities\") pod \"community-operators-hq7lm\" (UID: \"14816080-6b8f-4858-9f45-636bfc8110bf\") " pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.337389 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhq2t\" (UniqueName: \"kubernetes.io/projected/14816080-6b8f-4858-9f45-636bfc8110bf-kube-api-access-rhq2t\") pod \"community-operators-hq7lm\" (UID: \"14816080-6b8f-4858-9f45-636bfc8110bf\") " pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.337481 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14816080-6b8f-4858-9f45-636bfc8110bf-catalog-content\") pod \"community-operators-hq7lm\" (UID: \"14816080-6b8f-4858-9f45-636bfc8110bf\") " pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.338331 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14816080-6b8f-4858-9f45-636bfc8110bf-utilities\") pod \"community-operators-hq7lm\" (UID: \"14816080-6b8f-4858-9f45-636bfc8110bf\") " pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.338494 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14816080-6b8f-4858-9f45-636bfc8110bf-catalog-content\") pod \"community-operators-hq7lm\" (UID: \"14816080-6b8f-4858-9f45-636bfc8110bf\") " pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.359201 4765 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rhq2t\" (UniqueName: \"kubernetes.io/projected/14816080-6b8f-4858-9f45-636bfc8110bf-kube-api-access-rhq2t\") pod \"community-operators-hq7lm\" (UID: \"14816080-6b8f-4858-9f45-636bfc8110bf\") " pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.426829 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:47 crc kubenswrapper[4765]: I1210 07:56:47.987710 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hq7lm"] Dec 10 07:56:48 crc kubenswrapper[4765]: I1210 07:56:48.344719 4765 generic.go:334] "Generic (PLEG): container finished" podID="14816080-6b8f-4858-9f45-636bfc8110bf" containerID="7378cbbc1923ae1a551a85c43c018703b4aa684352531d3ee5df72c258ca8366" exitCode=0 Dec 10 07:56:48 crc kubenswrapper[4765]: I1210 07:56:48.344778 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq7lm" event={"ID":"14816080-6b8f-4858-9f45-636bfc8110bf","Type":"ContainerDied","Data":"7378cbbc1923ae1a551a85c43c018703b4aa684352531d3ee5df72c258ca8366"} Dec 10 07:56:48 crc kubenswrapper[4765]: I1210 07:56:48.344814 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq7lm" event={"ID":"14816080-6b8f-4858-9f45-636bfc8110bf","Type":"ContainerStarted","Data":"d24b46d4a17c850541eead51d3544f292805c4e28f721d17f8b2bfabe4d5bee1"} Dec 10 07:56:48 crc kubenswrapper[4765]: I1210 07:56:48.347272 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 07:56:52 crc kubenswrapper[4765]: I1210 07:56:52.383630 4765 generic.go:334] "Generic (PLEG): container finished" podID="14816080-6b8f-4858-9f45-636bfc8110bf" containerID="d1f515a1e773d4f2bb1dafade272a2953e4954a417574b2122199361d2a4f07a" exitCode=0 Dec 10 07:56:52 crc kubenswrapper[4765]: I1210 07:56:52.383795 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq7lm" event={"ID":"14816080-6b8f-4858-9f45-636bfc8110bf","Type":"ContainerDied","Data":"d1f515a1e773d4f2bb1dafade272a2953e4954a417574b2122199361d2a4f07a"} Dec 10 07:56:53 crc kubenswrapper[4765]: I1210 07:56:53.395168 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hq7lm" event={"ID":"14816080-6b8f-4858-9f45-636bfc8110bf","Type":"ContainerStarted","Data":"32fdd132f859ce85facae400ff8dba0eaf96f407e5232d9c8a375ea1d79dfa4b"} Dec 10 07:56:53 crc kubenswrapper[4765]: I1210 07:56:53.430081 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hq7lm" podStartSLOduration=1.609309296 podStartE2EDuration="6.430052744s" podCreationTimestamp="2025-12-10 07:56:47 +0000 UTC" firstStartedPulling="2025-12-10 07:56:48.346919603 +0000 UTC m=+4128.073584919" lastFinishedPulling="2025-12-10 07:56:53.167663051 +0000 UTC m=+4132.894328367" observedRunningTime="2025-12-10 07:56:53.422591781 +0000 UTC m=+4133.149257097" watchObservedRunningTime="2025-12-10 07:56:53.430052744 +0000 UTC m=+4133.156718060" Dec 10 07:56:57 crc kubenswrapper[4765]: I1210 07:56:57.427309 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:57 crc kubenswrapper[4765]: I1210 07:56:57.427938 4765 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:57 crc kubenswrapper[4765]: I1210 07:56:57.477320 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:58 crc kubenswrapper[4765]: I1210 07:56:58.472920 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hq7lm" Dec 10 07:56:58 crc kubenswrapper[4765]: I1210 07:56:58.544467 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hq7lm"] Dec 10 07:56:58 crc kubenswrapper[4765]: I1210 07:56:58.609217 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x65d6"] Dec 10 07:56:58 crc kubenswrapper[4765]: I1210 07:56:58.609545 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x65d6" podUID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerName="registry-server" containerID="cri-o://20906491490e7d6069a59255734e7cf3e4794d42c8ff7f117dcfe053893c5a73" gracePeriod=2 Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.439890 4765 generic.go:334] "Generic (PLEG): container finished" podID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerID="20906491490e7d6069a59255734e7cf3e4794d42c8ff7f117dcfe053893c5a73" exitCode=0 Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.440857 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x65d6" event={"ID":"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c","Type":"ContainerDied","Data":"20906491490e7d6069a59255734e7cf3e4794d42c8ff7f117dcfe053893c5a73"} Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.588549 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x65d6" Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.648876 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-catalog-content\") pod \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.648966 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gn7th\" (UniqueName: \"kubernetes.io/projected/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-kube-api-access-gn7th\") pod \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.649063 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-utilities\") pod \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\" (UID: \"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c\") " Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.649821 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-utilities" (OuterVolumeSpecName: "utilities") pod "237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" (UID: "237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.654902 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-kube-api-access-gn7th" (OuterVolumeSpecName: "kube-api-access-gn7th") pod "237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" (UID: "237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c"). InnerVolumeSpecName "kube-api-access-gn7th". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.698071 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" (UID: "237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.751198 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.751235 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gn7th\" (UniqueName: \"kubernetes.io/projected/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-kube-api-access-gn7th\") on node \"crc\" DevicePath \"\"" Dec 10 07:56:59 crc kubenswrapper[4765]: I1210 07:56:59.751250 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:57:00 crc kubenswrapper[4765]: I1210 07:57:00.452441 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x65d6" event={"ID":"237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c","Type":"ContainerDied","Data":"92555d7f32ad221a69782b3deb67178c449e002a2f0177cde0f878cdc45bb93c"} Dec 10 07:57:00 crc kubenswrapper[4765]: I1210 07:57:00.452527 4765 scope.go:117] "RemoveContainer" containerID="20906491490e7d6069a59255734e7cf3e4794d42c8ff7f117dcfe053893c5a73" Dec 10 07:57:00 crc kubenswrapper[4765]: I1210 07:57:00.452468 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x65d6" Dec 10 07:57:00 crc kubenswrapper[4765]: I1210 07:57:00.490400 4765 scope.go:117] "RemoveContainer" containerID="665bfcdd64158189627806224a38b9a6366ea55ebc5ac6f6265522634ab3c811" Dec 10 07:57:00 crc kubenswrapper[4765]: I1210 07:57:00.497339 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x65d6"] Dec 10 07:57:00 crc kubenswrapper[4765]: I1210 07:57:00.504061 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x65d6"] Dec 10 07:57:00 crc kubenswrapper[4765]: I1210 07:57:00.519664 4765 scope.go:117] "RemoveContainer" containerID="1d553645481de4b548336dd6d184e0a9749f29f661102671aed4d0b8f5090d9a" Dec 10 07:57:00 crc kubenswrapper[4765]: I1210 07:57:00.599468 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" path="/var/lib/kubelet/pods/237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c/volumes" Dec 10 07:57:04 crc kubenswrapper[4765]: I1210 07:57:04.049731 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:57:04 crc kubenswrapper[4765]: I1210 07:57:04.050042 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.567915 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-5ccw4"] Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.574106 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-5ccw4"] Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.598742 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0650ddc2-3ecc-49d5-bd29-54455ff56ebc" path="/var/lib/kubelet/pods/0650ddc2-3ecc-49d5-bd29-54455ff56ebc/volumes" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.749374 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-65v2p"] Dec 10 07:57:06 crc kubenswrapper[4765]: E1210 07:57:06.749712 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerName="extract-content" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.749729 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerName="extract-content" Dec 10 07:57:06 crc kubenswrapper[4765]: E1210 07:57:06.749751 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerName="extract-utilities" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.749757 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerName="extract-utilities" Dec 10 07:57:06 crc kubenswrapper[4765]: E1210 07:57:06.749766 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerName="registry-server" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 
07:57:06.749772 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerName="registry-server" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.749906 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="237c5fb0-13a8-4c3b-af3a-aca5dfe5b86c" containerName="registry-server" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.750436 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.753133 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.753355 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.753613 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.756390 4765 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-2xn5f" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.761396 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-65v2p"] Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.850159 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t9gf\" (UniqueName: \"kubernetes.io/projected/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-kube-api-access-4t9gf\") pod \"crc-storage-crc-65v2p\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.850257 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-crc-storage\") pod \"crc-storage-crc-65v2p\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.850293 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-node-mnt\") pod \"crc-storage-crc-65v2p\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.951305 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-node-mnt\") pod \"crc-storage-crc-65v2p\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.951414 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t9gf\" (UniqueName: \"kubernetes.io/projected/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-kube-api-access-4t9gf\") pod \"crc-storage-crc-65v2p\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.951501 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-crc-storage\") 
pod \"crc-storage-crc-65v2p\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.951637 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-node-mnt\") pod \"crc-storage-crc-65v2p\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.952511 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-crc-storage\") pod \"crc-storage-crc-65v2p\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:06 crc kubenswrapper[4765]: I1210 07:57:06.976641 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t9gf\" (UniqueName: \"kubernetes.io/projected/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-kube-api-access-4t9gf\") pod \"crc-storage-crc-65v2p\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:07 crc kubenswrapper[4765]: I1210 07:57:07.068528 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:07 crc kubenswrapper[4765]: I1210 07:57:07.476266 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-65v2p"] Dec 10 07:57:07 crc kubenswrapper[4765]: I1210 07:57:07.514484 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-65v2p" event={"ID":"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1","Type":"ContainerStarted","Data":"b623ff6753389adc245535857525a77e0418fda5ad228d4beaa8d0c0835af0d6"} Dec 10 07:57:09 crc kubenswrapper[4765]: I1210 07:57:09.532048 4765 generic.go:334] "Generic (PLEG): container finished" podID="f52d1a7b-23ed-44c7-883f-c8aca6dda9b1" containerID="415bcb3c819b3f2535d669966ffbbbde31704289626fef72b7e8d1a12198e87a" exitCode=0 Dec 10 07:57:09 crc kubenswrapper[4765]: I1210 07:57:09.532581 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-65v2p" event={"ID":"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1","Type":"ContainerDied","Data":"415bcb3c819b3f2535d669966ffbbbde31704289626fef72b7e8d1a12198e87a"} Dec 10 07:57:10 crc kubenswrapper[4765]: I1210 07:57:10.796591 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:10 crc kubenswrapper[4765]: I1210 07:57:10.903429 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t9gf\" (UniqueName: \"kubernetes.io/projected/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-kube-api-access-4t9gf\") pod \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " Dec 10 07:57:10 crc kubenswrapper[4765]: I1210 07:57:10.903494 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-crc-storage\") pod \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " Dec 10 07:57:10 crc kubenswrapper[4765]: I1210 07:57:10.903586 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-node-mnt\") pod \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\" (UID: \"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1\") " Dec 10 07:57:10 crc kubenswrapper[4765]: I1210 07:57:10.903776 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "f52d1a7b-23ed-44c7-883f-c8aca6dda9b1" (UID: "f52d1a7b-23ed-44c7-883f-c8aca6dda9b1"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:57:10 crc kubenswrapper[4765]: I1210 07:57:10.904101 4765 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 10 07:57:10 crc kubenswrapper[4765]: I1210 07:57:10.908786 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-kube-api-access-4t9gf" (OuterVolumeSpecName: "kube-api-access-4t9gf") pod "f52d1a7b-23ed-44c7-883f-c8aca6dda9b1" (UID: "f52d1a7b-23ed-44c7-883f-c8aca6dda9b1"). InnerVolumeSpecName "kube-api-access-4t9gf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:57:10 crc kubenswrapper[4765]: I1210 07:57:10.922789 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "f52d1a7b-23ed-44c7-883f-c8aca6dda9b1" (UID: "f52d1a7b-23ed-44c7-883f-c8aca6dda9b1"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:57:11 crc kubenswrapper[4765]: I1210 07:57:11.005652 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t9gf\" (UniqueName: \"kubernetes.io/projected/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-kube-api-access-4t9gf\") on node \"crc\" DevicePath \"\"" Dec 10 07:57:11 crc kubenswrapper[4765]: I1210 07:57:11.005689 4765 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 10 07:57:11 crc kubenswrapper[4765]: I1210 07:57:11.557427 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-65v2p" event={"ID":"f52d1a7b-23ed-44c7-883f-c8aca6dda9b1","Type":"ContainerDied","Data":"b623ff6753389adc245535857525a77e0418fda5ad228d4beaa8d0c0835af0d6"} Dec 10 07:57:11 crc kubenswrapper[4765]: I1210 07:57:11.557741 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b623ff6753389adc245535857525a77e0418fda5ad228d4beaa8d0c0835af0d6" Dec 10 07:57:11 crc kubenswrapper[4765]: I1210 07:57:11.557533 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-65v2p" Dec 10 07:57:12 crc kubenswrapper[4765]: I1210 07:57:12.992720 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-65v2p"] Dec 10 07:57:12 crc kubenswrapper[4765]: I1210 07:57:12.998126 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-65v2p"] Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.134434 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-qdw82"] Dec 10 07:57:13 crc kubenswrapper[4765]: E1210 07:57:13.134772 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f52d1a7b-23ed-44c7-883f-c8aca6dda9b1" containerName="storage" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.134790 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f52d1a7b-23ed-44c7-883f-c8aca6dda9b1" containerName="storage" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.134946 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f52d1a7b-23ed-44c7-883f-c8aca6dda9b1" containerName="storage" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.135455 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.137633 4765 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-2xn5f" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.137887 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.137977 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.140167 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.159761 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-qdw82"] Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.235613 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txnp9\" (UniqueName: \"kubernetes.io/projected/0513628e-48fd-42c2-8bed-3872c974481c-kube-api-access-txnp9\") pod \"crc-storage-crc-qdw82\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.235709 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0513628e-48fd-42c2-8bed-3872c974481c-crc-storage\") pod \"crc-storage-crc-qdw82\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.235934 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0513628e-48fd-42c2-8bed-3872c974481c-node-mnt\") pod \"crc-storage-crc-qdw82\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.336962 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0513628e-48fd-42c2-8bed-3872c974481c-crc-storage\") pod \"crc-storage-crc-qdw82\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.337053 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0513628e-48fd-42c2-8bed-3872c974481c-node-mnt\") pod \"crc-storage-crc-qdw82\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.337132 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txnp9\" (UniqueName: \"kubernetes.io/projected/0513628e-48fd-42c2-8bed-3872c974481c-kube-api-access-txnp9\") pod \"crc-storage-crc-qdw82\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.337317 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0513628e-48fd-42c2-8bed-3872c974481c-node-mnt\") pod \"crc-storage-crc-qdw82\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " 
pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.337745 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0513628e-48fd-42c2-8bed-3872c974481c-crc-storage\") pod \"crc-storage-crc-qdw82\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.355034 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txnp9\" (UniqueName: \"kubernetes.io/projected/0513628e-48fd-42c2-8bed-3872c974481c-kube-api-access-txnp9\") pod \"crc-storage-crc-qdw82\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.452899 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:13 crc kubenswrapper[4765]: I1210 07:57:13.867942 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-qdw82"] Dec 10 07:57:13 crc kubenswrapper[4765]: W1210 07:57:13.876719 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0513628e_48fd_42c2_8bed_3872c974481c.slice/crio-46a1aa49c0b049f2b632bd4f7bac0ad21569983ed1d9b8f2f86ec14f6f84f665 WatchSource:0}: Error finding container 46a1aa49c0b049f2b632bd4f7bac0ad21569983ed1d9b8f2f86ec14f6f84f665: Status 404 returned error can't find the container with id 46a1aa49c0b049f2b632bd4f7bac0ad21569983ed1d9b8f2f86ec14f6f84f665 Dec 10 07:57:14 crc kubenswrapper[4765]: I1210 07:57:14.599579 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f52d1a7b-23ed-44c7-883f-c8aca6dda9b1" path="/var/lib/kubelet/pods/f52d1a7b-23ed-44c7-883f-c8aca6dda9b1/volumes" Dec 10 07:57:14 crc kubenswrapper[4765]: I1210 07:57:14.600259 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-qdw82" event={"ID":"0513628e-48fd-42c2-8bed-3872c974481c","Type":"ContainerStarted","Data":"46a1aa49c0b049f2b632bd4f7bac0ad21569983ed1d9b8f2f86ec14f6f84f665"} Dec 10 07:57:15 crc kubenswrapper[4765]: I1210 07:57:15.600639 4765 generic.go:334] "Generic (PLEG): container finished" podID="0513628e-48fd-42c2-8bed-3872c974481c" containerID="b0aec466ff994aadc7e4c2ea8b843e2f1773a19072b9aa3451ed5466721a68fc" exitCode=0 Dec 10 07:57:15 crc kubenswrapper[4765]: I1210 07:57:15.600757 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-qdw82" event={"ID":"0513628e-48fd-42c2-8bed-3872c974481c","Type":"ContainerDied","Data":"b0aec466ff994aadc7e4c2ea8b843e2f1773a19072b9aa3451ed5466721a68fc"} Dec 10 07:57:16 crc kubenswrapper[4765]: I1210 07:57:16.886593 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:16 crc kubenswrapper[4765]: I1210 07:57:16.998497 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0513628e-48fd-42c2-8bed-3872c974481c-crc-storage\") pod \"0513628e-48fd-42c2-8bed-3872c974481c\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " Dec 10 07:57:16 crc kubenswrapper[4765]: I1210 07:57:16.998588 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0513628e-48fd-42c2-8bed-3872c974481c-node-mnt\") pod \"0513628e-48fd-42c2-8bed-3872c974481c\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " Dec 10 07:57:16 crc kubenswrapper[4765]: I1210 07:57:16.998620 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txnp9\" (UniqueName: \"kubernetes.io/projected/0513628e-48fd-42c2-8bed-3872c974481c-kube-api-access-txnp9\") pod \"0513628e-48fd-42c2-8bed-3872c974481c\" (UID: \"0513628e-48fd-42c2-8bed-3872c974481c\") " Dec 10 07:57:16 crc kubenswrapper[4765]: I1210 07:57:16.998951 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0513628e-48fd-42c2-8bed-3872c974481c-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "0513628e-48fd-42c2-8bed-3872c974481c" (UID: "0513628e-48fd-42c2-8bed-3872c974481c"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 07:57:16 crc kubenswrapper[4765]: I1210 07:57:16.999684 4765 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0513628e-48fd-42c2-8bed-3872c974481c-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 10 07:57:17 crc kubenswrapper[4765]: I1210 07:57:17.004800 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0513628e-48fd-42c2-8bed-3872c974481c-kube-api-access-txnp9" (OuterVolumeSpecName: "kube-api-access-txnp9") pod "0513628e-48fd-42c2-8bed-3872c974481c" (UID: "0513628e-48fd-42c2-8bed-3872c974481c"). InnerVolumeSpecName "kube-api-access-txnp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:57:17 crc kubenswrapper[4765]: I1210 07:57:17.020657 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0513628e-48fd-42c2-8bed-3872c974481c-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "0513628e-48fd-42c2-8bed-3872c974481c" (UID: "0513628e-48fd-42c2-8bed-3872c974481c"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 07:57:17 crc kubenswrapper[4765]: I1210 07:57:17.101497 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txnp9\" (UniqueName: \"kubernetes.io/projected/0513628e-48fd-42c2-8bed-3872c974481c-kube-api-access-txnp9\") on node \"crc\" DevicePath \"\"" Dec 10 07:57:17 crc kubenswrapper[4765]: I1210 07:57:17.101579 4765 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0513628e-48fd-42c2-8bed-3872c974481c-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 10 07:57:17 crc kubenswrapper[4765]: I1210 07:57:17.614532 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-qdw82" event={"ID":"0513628e-48fd-42c2-8bed-3872c974481c","Type":"ContainerDied","Data":"46a1aa49c0b049f2b632bd4f7bac0ad21569983ed1d9b8f2f86ec14f6f84f665"} Dec 10 07:57:17 crc kubenswrapper[4765]: I1210 07:57:17.615070 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46a1aa49c0b049f2b632bd4f7bac0ad21569983ed1d9b8f2f86ec14f6f84f665" Dec 10 07:57:17 crc kubenswrapper[4765]: I1210 07:57:17.614564 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-qdw82" Dec 10 07:57:30 crc kubenswrapper[4765]: I1210 07:57:30.117708 4765 scope.go:117] "RemoveContainer" containerID="2e0049df642baac550027bf78f400c731b96917d76174c1af3703380175c40ba" Dec 10 07:57:34 crc kubenswrapper[4765]: I1210 07:57:34.049652 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:57:34 crc kubenswrapper[4765]: I1210 07:57:34.050962 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:57:34 crc kubenswrapper[4765]: I1210 07:57:34.051074 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 07:57:34 crc kubenswrapper[4765]: I1210 07:57:34.052218 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fb69862d770b3a99e6d1672526b9cfd663259d0384b956ca066d13aec349d21b"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 07:57:34 crc kubenswrapper[4765]: I1210 07:57:34.052280 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://fb69862d770b3a99e6d1672526b9cfd663259d0384b956ca066d13aec349d21b" gracePeriod=600 Dec 10 07:57:34 crc kubenswrapper[4765]: I1210 07:57:34.743803 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="fb69862d770b3a99e6d1672526b9cfd663259d0384b956ca066d13aec349d21b" exitCode=0 Dec 10 07:57:34 crc 
kubenswrapper[4765]: I1210 07:57:34.743881 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"fb69862d770b3a99e6d1672526b9cfd663259d0384b956ca066d13aec349d21b"} Dec 10 07:57:34 crc kubenswrapper[4765]: I1210 07:57:34.744175 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286"} Dec 10 07:57:34 crc kubenswrapper[4765]: I1210 07:57:34.744195 4765 scope.go:117] "RemoveContainer" containerID="416de98f2849cb19d84d9b994bc2207531cdd1985c9d6acbd06bd8f0424c7271" Dec 10 07:59:34 crc kubenswrapper[4765]: I1210 07:59:34.049433 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 07:59:34 crc kubenswrapper[4765]: I1210 07:59:34.050046 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.032792 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g5dr5"] Dec 10 07:59:43 crc kubenswrapper[4765]: E1210 07:59:43.033802 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0513628e-48fd-42c2-8bed-3872c974481c" containerName="storage" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.033821 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="0513628e-48fd-42c2-8bed-3872c974481c" containerName="storage" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.034109 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="0513628e-48fd-42c2-8bed-3872c974481c" containerName="storage" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.035359 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.052831 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5dr5"] Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.236299 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-989th\" (UniqueName: \"kubernetes.io/projected/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-kube-api-access-989th\") pod \"redhat-marketplace-g5dr5\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.236378 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-utilities\") pod \"redhat-marketplace-g5dr5\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.236567 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-catalog-content\") pod \"redhat-marketplace-g5dr5\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.338297 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-989th\" (UniqueName: \"kubernetes.io/projected/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-kube-api-access-989th\") pod \"redhat-marketplace-g5dr5\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.338365 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-utilities\") pod \"redhat-marketplace-g5dr5\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.338415 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-catalog-content\") pod \"redhat-marketplace-g5dr5\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.339003 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-catalog-content\") pod \"redhat-marketplace-g5dr5\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.339181 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-utilities\") pod \"redhat-marketplace-g5dr5\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.367765 4765 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-989th\" (UniqueName: \"kubernetes.io/projected/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-kube-api-access-989th\") pod \"redhat-marketplace-g5dr5\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:43 crc kubenswrapper[4765]: I1210 07:59:43.654738 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:44 crc kubenswrapper[4765]: I1210 07:59:44.081949 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5dr5"] Dec 10 07:59:44 crc kubenswrapper[4765]: I1210 07:59:44.694169 4765 generic.go:334] "Generic (PLEG): container finished" podID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerID="a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3" exitCode=0 Dec 10 07:59:44 crc kubenswrapper[4765]: I1210 07:59:44.694636 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5dr5" event={"ID":"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09","Type":"ContainerDied","Data":"a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3"} Dec 10 07:59:44 crc kubenswrapper[4765]: I1210 07:59:44.694692 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5dr5" event={"ID":"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09","Type":"ContainerStarted","Data":"f2679618d3306f56e645dd6d17706fbf960ecaaa7cd7bde7a823edadb2af3cd4"} Dec 10 07:59:45 crc kubenswrapper[4765]: I1210 07:59:45.703139 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5dr5" event={"ID":"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09","Type":"ContainerStarted","Data":"96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4"} Dec 10 07:59:46 crc kubenswrapper[4765]: I1210 07:59:46.712930 4765 generic.go:334] "Generic (PLEG): container finished" podID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerID="96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4" exitCode=0 Dec 10 07:59:46 crc kubenswrapper[4765]: I1210 07:59:46.712987 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5dr5" event={"ID":"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09","Type":"ContainerDied","Data":"96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4"} Dec 10 07:59:47 crc kubenswrapper[4765]: I1210 07:59:47.727918 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5dr5" event={"ID":"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09","Type":"ContainerStarted","Data":"aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057"} Dec 10 07:59:53 crc kubenswrapper[4765]: I1210 07:59:53.655631 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:53 crc kubenswrapper[4765]: I1210 07:59:53.657286 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:53 crc kubenswrapper[4765]: I1210 07:59:53.703715 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:53 crc kubenswrapper[4765]: I1210 07:59:53.731808 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g5dr5" 
podStartSLOduration=8.057920098 podStartE2EDuration="10.731777351s" podCreationTimestamp="2025-12-10 07:59:43 +0000 UTC" firstStartedPulling="2025-12-10 07:59:44.698970183 +0000 UTC m=+4304.425635489" lastFinishedPulling="2025-12-10 07:59:47.372827426 +0000 UTC m=+4307.099492742" observedRunningTime="2025-12-10 07:59:47.754773288 +0000 UTC m=+4307.481438604" watchObservedRunningTime="2025-12-10 07:59:53.731777351 +0000 UTC m=+4313.458442667" Dec 10 07:59:53 crc kubenswrapper[4765]: I1210 07:59:53.821062 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:53 crc kubenswrapper[4765]: I1210 07:59:53.940024 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5dr5"] Dec 10 07:59:55 crc kubenswrapper[4765]: I1210 07:59:55.781905 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g5dr5" podUID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerName="registry-server" containerID="cri-o://aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057" gracePeriod=2 Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.674525 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.764049 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-989th\" (UniqueName: \"kubernetes.io/projected/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-kube-api-access-989th\") pod \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.764208 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-catalog-content\") pod \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.766419 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-utilities\") pod \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\" (UID: \"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09\") " Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.767247 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-utilities" (OuterVolumeSpecName: "utilities") pod "e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" (UID: "e1dfa681-234a-40bb-9d40-bb1cfa4b2d09"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.770670 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-kube-api-access-989th" (OuterVolumeSpecName: "kube-api-access-989th") pod "e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" (UID: "e1dfa681-234a-40bb-9d40-bb1cfa4b2d09"). InnerVolumeSpecName "kube-api-access-989th". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.790598 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" (UID: "e1dfa681-234a-40bb-9d40-bb1cfa4b2d09"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.793941 4765 generic.go:334] "Generic (PLEG): container finished" podID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerID="aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057" exitCode=0 Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.793984 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5dr5" event={"ID":"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09","Type":"ContainerDied","Data":"aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057"} Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.794020 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5dr5" event={"ID":"e1dfa681-234a-40bb-9d40-bb1cfa4b2d09","Type":"ContainerDied","Data":"f2679618d3306f56e645dd6d17706fbf960ecaaa7cd7bde7a823edadb2af3cd4"} Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.794048 4765 scope.go:117] "RemoveContainer" containerID="aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.794065 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5dr5" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.814357 4765 scope.go:117] "RemoveContainer" containerID="96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.837587 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5dr5"] Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.848127 4765 scope.go:117] "RemoveContainer" containerID="a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.848821 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5dr5"] Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.870736 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-989th\" (UniqueName: \"kubernetes.io/projected/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-kube-api-access-989th\") on node \"crc\" DevicePath \"\"" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.870771 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.870782 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.870899 4765 scope.go:117] "RemoveContainer" containerID="aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057" Dec 10 07:59:56 crc kubenswrapper[4765]: E1210 07:59:56.871369 4765 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057\": container with ID starting with aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057 not found: ID does not exist" containerID="aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.871414 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057"} err="failed to get container status \"aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057\": rpc error: code = NotFound desc = could not find container \"aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057\": container with ID starting with aac15eb75d6bbb22f8b86720edc6c78b6eb4759239072313b22ed7cb5731e057 not found: ID does not exist" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.871435 4765 scope.go:117] "RemoveContainer" containerID="96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4" Dec 10 07:59:56 crc kubenswrapper[4765]: E1210 07:59:56.871634 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4\": container with ID starting with 96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4 not found: ID does not exist" containerID="96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.871675 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4"} err="failed to get container status \"96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4\": rpc error: code = NotFound desc = could not find container \"96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4\": container with ID starting with 96e78ed06e9831bcf2fdc7c01efb3061a087d36f86ea978e1d28026afa7b2fc4 not found: ID does not exist" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.871690 4765 scope.go:117] "RemoveContainer" containerID="a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3" Dec 10 07:59:56 crc kubenswrapper[4765]: E1210 07:59:56.872098 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3\": container with ID starting with a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3 not found: ID does not exist" containerID="a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3" Dec 10 07:59:56 crc kubenswrapper[4765]: I1210 07:59:56.872120 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3"} err="failed to get container status \"a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3\": rpc error: code = NotFound desc = could not find container \"a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3\": container with ID starting with a3dc9f58c7cbfcb177480b0b3ff42ac67c5458705e2b42a3276d01bd52e491a3 not found: ID does not exist" Dec 10 07:59:58 crc kubenswrapper[4765]: I1210 07:59:58.598524 4765 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" path="/var/lib/kubelet/pods/e1dfa681-234a-40bb-9d40-bb1cfa4b2d09/volumes" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.161280 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8"] Dec 10 08:00:00 crc kubenswrapper[4765]: E1210 08:00:00.161965 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerName="extract-content" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.161982 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerName="extract-content" Dec 10 08:00:00 crc kubenswrapper[4765]: E1210 08:00:00.162004 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerName="extract-utilities" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.162012 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerName="extract-utilities" Dec 10 08:00:00 crc kubenswrapper[4765]: E1210 08:00:00.162030 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerName="registry-server" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.162040 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerName="registry-server" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.162267 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1dfa681-234a-40bb-9d40-bb1cfa4b2d09" containerName="registry-server" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.162969 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.173577 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.174328 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.194659 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8"] Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.221325 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0e180da1-401a-4844-a80e-b92e52332c44-secret-volume\") pod \"collect-profiles-29422560-7mpc8\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.221379 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvdwc\" (UniqueName: \"kubernetes.io/projected/0e180da1-401a-4844-a80e-b92e52332c44-kube-api-access-nvdwc\") pod \"collect-profiles-29422560-7mpc8\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.221421 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0e180da1-401a-4844-a80e-b92e52332c44-config-volume\") pod \"collect-profiles-29422560-7mpc8\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.322453 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0e180da1-401a-4844-a80e-b92e52332c44-secret-volume\") pod \"collect-profiles-29422560-7mpc8\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.322531 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvdwc\" (UniqueName: \"kubernetes.io/projected/0e180da1-401a-4844-a80e-b92e52332c44-kube-api-access-nvdwc\") pod \"collect-profiles-29422560-7mpc8\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.322581 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0e180da1-401a-4844-a80e-b92e52332c44-config-volume\") pod \"collect-profiles-29422560-7mpc8\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.325550 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0e180da1-401a-4844-a80e-b92e52332c44-config-volume\") pod 
\"collect-profiles-29422560-7mpc8\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.328117 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0e180da1-401a-4844-a80e-b92e52332c44-secret-volume\") pod \"collect-profiles-29422560-7mpc8\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.339742 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvdwc\" (UniqueName: \"kubernetes.io/projected/0e180da1-401a-4844-a80e-b92e52332c44-kube-api-access-nvdwc\") pod \"collect-profiles-29422560-7mpc8\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.513115 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:00 crc kubenswrapper[4765]: I1210 08:00:00.925405 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8"] Dec 10 08:00:01 crc kubenswrapper[4765]: I1210 08:00:01.827650 4765 generic.go:334] "Generic (PLEG): container finished" podID="0e180da1-401a-4844-a80e-b92e52332c44" containerID="d75f66d4e8c55e5a3edabb8c8f9c4fee6524535b7dff26a9d1b22c5422af27d1" exitCode=0 Dec 10 08:00:01 crc kubenswrapper[4765]: I1210 08:00:01.827730 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" event={"ID":"0e180da1-401a-4844-a80e-b92e52332c44","Type":"ContainerDied","Data":"d75f66d4e8c55e5a3edabb8c8f9c4fee6524535b7dff26a9d1b22c5422af27d1"} Dec 10 08:00:01 crc kubenswrapper[4765]: I1210 08:00:01.828181 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" event={"ID":"0e180da1-401a-4844-a80e-b92e52332c44","Type":"ContainerStarted","Data":"5948b9bc5c9e12a65b5966524cb107daed86ea0f17fd46920b6e56f993abb1a0"} Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.094954 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.170032 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0e180da1-401a-4844-a80e-b92e52332c44-secret-volume\") pod \"0e180da1-401a-4844-a80e-b92e52332c44\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.170179 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0e180da1-401a-4844-a80e-b92e52332c44-config-volume\") pod \"0e180da1-401a-4844-a80e-b92e52332c44\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.170208 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvdwc\" (UniqueName: \"kubernetes.io/projected/0e180da1-401a-4844-a80e-b92e52332c44-kube-api-access-nvdwc\") pod \"0e180da1-401a-4844-a80e-b92e52332c44\" (UID: \"0e180da1-401a-4844-a80e-b92e52332c44\") " Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.171024 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e180da1-401a-4844-a80e-b92e52332c44-config-volume" (OuterVolumeSpecName: "config-volume") pod "0e180da1-401a-4844-a80e-b92e52332c44" (UID: "0e180da1-401a-4844-a80e-b92e52332c44"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.175969 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e180da1-401a-4844-a80e-b92e52332c44-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0e180da1-401a-4844-a80e-b92e52332c44" (UID: "0e180da1-401a-4844-a80e-b92e52332c44"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.176013 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e180da1-401a-4844-a80e-b92e52332c44-kube-api-access-nvdwc" (OuterVolumeSpecName: "kube-api-access-nvdwc") pod "0e180da1-401a-4844-a80e-b92e52332c44" (UID: "0e180da1-401a-4844-a80e-b92e52332c44"). InnerVolumeSpecName "kube-api-access-nvdwc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.271815 4765 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0e180da1-401a-4844-a80e-b92e52332c44-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.271858 4765 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0e180da1-401a-4844-a80e-b92e52332c44-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.271870 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvdwc\" (UniqueName: \"kubernetes.io/projected/0e180da1-401a-4844-a80e-b92e52332c44-kube-api-access-nvdwc\") on node \"crc\" DevicePath \"\"" Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.846428 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" event={"ID":"0e180da1-401a-4844-a80e-b92e52332c44","Type":"ContainerDied","Data":"5948b9bc5c9e12a65b5966524cb107daed86ea0f17fd46920b6e56f993abb1a0"} Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.846482 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5948b9bc5c9e12a65b5966524cb107daed86ea0f17fd46920b6e56f993abb1a0" Dec 10 08:00:03 crc kubenswrapper[4765]: I1210 08:00:03.846545 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422560-7mpc8" Dec 10 08:00:04 crc kubenswrapper[4765]: I1210 08:00:04.049257 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 08:00:04 crc kubenswrapper[4765]: I1210 08:00:04.049329 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 08:00:04 crc kubenswrapper[4765]: I1210 08:00:04.165327 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx"] Dec 10 08:00:04 crc kubenswrapper[4765]: I1210 08:00:04.171909 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422515-kbmrx"] Dec 10 08:00:04 crc kubenswrapper[4765]: I1210 08:00:04.598827 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9680dd1c-fbb4-4869-b998-f395f4fab06f" path="/var/lib/kubelet/pods/9680dd1c-fbb4-4869-b998-f395f4fab06f/volumes" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.144696 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cddbb9455-26jjv"] Dec 10 08:00:28 crc kubenswrapper[4765]: E1210 08:00:28.145735 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e180da1-401a-4844-a80e-b92e52332c44" containerName="collect-profiles" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.145758 4765 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0e180da1-401a-4844-a80e-b92e52332c44" containerName="collect-profiles" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.145971 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e180da1-401a-4844-a80e-b92e52332c44" containerName="collect-profiles" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.147017 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.149789 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.150654 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-76dft" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.150723 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.150893 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.150675 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.192592 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cddbb9455-26jjv"] Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.228029 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-config\") pod \"dnsmasq-dns-7cddbb9455-26jjv\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.228079 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-dns-svc\") pod \"dnsmasq-dns-7cddbb9455-26jjv\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.228230 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g9f9\" (UniqueName: \"kubernetes.io/projected/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-kube-api-access-8g9f9\") pod \"dnsmasq-dns-7cddbb9455-26jjv\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.329918 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-config\") pod \"dnsmasq-dns-7cddbb9455-26jjv\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.329963 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-dns-svc\") pod \"dnsmasq-dns-7cddbb9455-26jjv\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.329991 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-8g9f9\" (UniqueName: \"kubernetes.io/projected/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-kube-api-access-8g9f9\") pod \"dnsmasq-dns-7cddbb9455-26jjv\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.331324 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-config\") pod \"dnsmasq-dns-7cddbb9455-26jjv\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.332126 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-dns-svc\") pod \"dnsmasq-dns-7cddbb9455-26jjv\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.358306 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g9f9\" (UniqueName: \"kubernetes.io/projected/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-kube-api-access-8g9f9\") pod \"dnsmasq-dns-7cddbb9455-26jjv\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.402116 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c99fb6b65-knzvz"] Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.404052 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.430202 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c99fb6b65-knzvz"] Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.431259 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a8e07587-c542-491a-bef5-c968726115c4-dns-svc\") pod \"dnsmasq-dns-7c99fb6b65-knzvz\" (UID: \"a8e07587-c542-491a-bef5-c968726115c4\") " pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.431353 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwfb8\" (UniqueName: \"kubernetes.io/projected/a8e07587-c542-491a-bef5-c968726115c4-kube-api-access-dwfb8\") pod \"dnsmasq-dns-7c99fb6b65-knzvz\" (UID: \"a8e07587-c542-491a-bef5-c968726115c4\") " pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.431448 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8e07587-c542-491a-bef5-c968726115c4-config\") pod \"dnsmasq-dns-7c99fb6b65-knzvz\" (UID: \"a8e07587-c542-491a-bef5-c968726115c4\") " pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.475360 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.533315 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8e07587-c542-491a-bef5-c968726115c4-config\") pod \"dnsmasq-dns-7c99fb6b65-knzvz\" (UID: \"a8e07587-c542-491a-bef5-c968726115c4\") " pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.533457 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a8e07587-c542-491a-bef5-c968726115c4-dns-svc\") pod \"dnsmasq-dns-7c99fb6b65-knzvz\" (UID: \"a8e07587-c542-491a-bef5-c968726115c4\") " pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.533500 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwfb8\" (UniqueName: \"kubernetes.io/projected/a8e07587-c542-491a-bef5-c968726115c4-kube-api-access-dwfb8\") pod \"dnsmasq-dns-7c99fb6b65-knzvz\" (UID: \"a8e07587-c542-491a-bef5-c968726115c4\") " pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.534588 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8e07587-c542-491a-bef5-c968726115c4-config\") pod \"dnsmasq-dns-7c99fb6b65-knzvz\" (UID: \"a8e07587-c542-491a-bef5-c968726115c4\") " pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.534897 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a8e07587-c542-491a-bef5-c968726115c4-dns-svc\") pod \"dnsmasq-dns-7c99fb6b65-knzvz\" (UID: \"a8e07587-c542-491a-bef5-c968726115c4\") " pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.565030 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwfb8\" (UniqueName: \"kubernetes.io/projected/a8e07587-c542-491a-bef5-c968726115c4-kube-api-access-dwfb8\") pod \"dnsmasq-dns-7c99fb6b65-knzvz\" (UID: \"a8e07587-c542-491a-bef5-c968726115c4\") " pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.726389 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:00:28 crc kubenswrapper[4765]: I1210 08:00:28.987999 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cddbb9455-26jjv"] Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.034268 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" event={"ID":"32421dc9-d4ec-48eb-bd12-ab876ea9e40d","Type":"ContainerStarted","Data":"f5525e39c96c32edee1d2e8e14b4f0988550f1a5aaea6d3f99fb61a2f126b394"} Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.207364 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c99fb6b65-knzvz"] Dec 10 08:00:29 crc kubenswrapper[4765]: W1210 08:00:29.212216 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8e07587_c542_491a_bef5_c968726115c4.slice/crio-2e9b52b6e84abc734b20cb5acf818445a7106b0cfa06d620563f5639a25b73e1 WatchSource:0}: Error finding container 2e9b52b6e84abc734b20cb5acf818445a7106b0cfa06d620563f5639a25b73e1: Status 404 returned error can't find the container with id 2e9b52b6e84abc734b20cb5acf818445a7106b0cfa06d620563f5639a25b73e1 Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.269848 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.271873 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.290472 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.290710 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.290945 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.291127 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-hb5xw" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.291362 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.302264 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.348538 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5809588a-3e18-4443-9722-1903d42d84f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5809588a-3e18-4443-9722-1903d42d84f9\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.348609 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.348736 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.348773 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.348819 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwgl9\" (UniqueName: \"kubernetes.io/projected/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-kube-api-access-cwgl9\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.348846 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.348869 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.348886 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.348907 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.450953 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwgl9\" (UniqueName: \"kubernetes.io/projected/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-kube-api-access-cwgl9\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.451067 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.451141 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.451167 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.451200 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.451293 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5809588a-3e18-4443-9722-1903d42d84f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5809588a-3e18-4443-9722-1903d42d84f9\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.451325 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.451405 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.451441 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.453803 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.454027 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.456566 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc 
kubenswrapper[4765]: I1210 08:00:29.458382 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.460386 4765 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.460481 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5809588a-3e18-4443-9722-1903d42d84f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5809588a-3e18-4443-9722-1903d42d84f9\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/16f2f6e81626d911290aa8c80c521e5ef81fd30af9e58bccd4c5e3826e5bb9fc/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.460685 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.461627 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.461999 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.481878 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwgl9\" (UniqueName: \"kubernetes.io/projected/46c457c7-93b0-495c-ae8b-dbdc2e8a605b-kube-api-access-cwgl9\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.506034 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5809588a-3e18-4443-9722-1903d42d84f9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5809588a-3e18-4443-9722-1903d42d84f9\") pod \"rabbitmq-server-0\" (UID: \"46c457c7-93b0-495c-ae8b-dbdc2e8a605b\") " pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.612517 4765 util.go:30] "No sandbox for pod can be found. 
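The csi_attacher.go:380 entry above is kubelet discovering that the kubevirt.io.hostpath-provisioner node plugin does not advertise the CSI STAGE_UNSTAGE_VOLUME capability, so it skips the per-node staging step ("Skipping MountDevice...") and goes straight to the per-pod publish, which is why MountVolume.MountDevice "succeeds" immediately and only the later MountVolume.SetUp does real work. The capability comes from the driver's NodeGetCapabilities response; a hedged sketch using the CSI spec's Go bindings (github.com/container-storage-interface/spec; not the hostpath provisioner's actual code):

    package main // sketch of a CSI node server's capability answer

    import (
        "context"

        csi "github.com/container-storage-interface/spec/lib/go/csi"
    )

    type nodeServer struct {
        csi.UnimplementedNodeServer
    }

    // Returning no capabilities, as sketched here, yields exactly the
    // "STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice..."
    // path in the log above.
    func (s *nodeServer) NodeGetCapabilities(ctx context.Context,
        req *csi.NodeGetCapabilitiesRequest) (*csi.NodeGetCapabilitiesResponse, error) {
        return &csi.NodeGetCapabilitiesResponse{}, nil
        // A driver that stages volumes once per node would instead return:
        //   Capabilities: []*csi.NodeServiceCapability{{
        //       Type: &csi.NodeServiceCapability_Rpc{Rpc: &csi.NodeServiceCapability_RPC{
        //           Type: csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME,
        //       }},
        //   }}
        // and kubelet would then call NodeStageVolume before each publish.
    }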
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 08:00:29 crc kubenswrapper[4765]: I1210 08:00:29.963017 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 08:00:30 crc kubenswrapper[4765]: W1210 08:00:30.066146 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46c457c7_93b0_495c_ae8b_dbdc2e8a605b.slice/crio-9a630de70f9039eb3ba90ae4f766e7410c5854eb73eb45f9d5eaf1920fc895d0 WatchSource:0}: Error finding container 9a630de70f9039eb3ba90ae4f766e7410c5854eb73eb45f9d5eaf1920fc895d0: Status 404 returned error can't find the container with id 9a630de70f9039eb3ba90ae4f766e7410c5854eb73eb45f9d5eaf1920fc895d0 Dec 10 08:00:30 crc kubenswrapper[4765]: I1210 08:00:30.103192 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" event={"ID":"a8e07587-c542-491a-bef5-c968726115c4","Type":"ContainerStarted","Data":"2e9b52b6e84abc734b20cb5acf818445a7106b0cfa06d620563f5639a25b73e1"} Dec 10 08:00:30 crc kubenswrapper[4765]: I1210 08:00:30.245843 4765 scope.go:117] "RemoveContainer" containerID="5bfbb24055fd6246eb60dc3da32841f987f99480396094e3193c4530b8f8d039" Dec 10 08:00:30 crc kubenswrapper[4765]: I1210 08:00:30.898214 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 10 08:00:30 crc kubenswrapper[4765]: I1210 08:00:30.900953 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 10 08:00:30 crc kubenswrapper[4765]: I1210 08:00:30.906767 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 10 08:00:30 crc kubenswrapper[4765]: I1210 08:00:30.909707 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-r22fz" Dec 10 08:00:30 crc kubenswrapper[4765]: I1210 08:00:30.910554 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 10 08:00:30 crc kubenswrapper[4765]: I1210 08:00:30.910715 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 10 08:00:30 crc kubenswrapper[4765]: I1210 08:00:30.916418 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 10 08:00:30 crc kubenswrapper[4765]: I1210 08:00:30.921176 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.088796 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c9b61338-8aac-4f36-9bfc-0ec7601c9345-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.088896 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9b61338-8aac-4f36-9bfc-0ec7601c9345-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.088938 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/c9b61338-8aac-4f36-9bfc-0ec7601c9345-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.089626 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mwk6\" (UniqueName: \"kubernetes.io/projected/c9b61338-8aac-4f36-9bfc-0ec7601c9345-kube-api-access-5mwk6\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.089681 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c9b61338-8aac-4f36-9bfc-0ec7601c9345-config-data-default\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.089719 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c9b61338-8aac-4f36-9bfc-0ec7601c9345-kolla-config\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.089745 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-079dbd25-5e27-448c-83ba-3180683ce108\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-079dbd25-5e27-448c-83ba-3180683ce108\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.089781 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b61338-8aac-4f36-9bfc-0ec7601c9345-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.113755 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46c457c7-93b0-495c-ae8b-dbdc2e8a605b","Type":"ContainerStarted","Data":"9a630de70f9039eb3ba90ae4f766e7410c5854eb73eb45f9d5eaf1920fc895d0"} Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.192696 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9b61338-8aac-4f36-9bfc-0ec7601c9345-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.192743 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9b61338-8aac-4f36-9bfc-0ec7601c9345-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.192804 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mwk6\" (UniqueName: \"kubernetes.io/projected/c9b61338-8aac-4f36-9bfc-0ec7601c9345-kube-api-access-5mwk6\") pod 
\"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.192837 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c9b61338-8aac-4f36-9bfc-0ec7601c9345-config-data-default\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.192893 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c9b61338-8aac-4f36-9bfc-0ec7601c9345-kolla-config\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.192917 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-079dbd25-5e27-448c-83ba-3180683ce108\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-079dbd25-5e27-448c-83ba-3180683ce108\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.192981 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b61338-8aac-4f36-9bfc-0ec7601c9345-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.193035 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c9b61338-8aac-4f36-9bfc-0ec7601c9345-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.193879 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c9b61338-8aac-4f36-9bfc-0ec7601c9345-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.194388 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c9b61338-8aac-4f36-9bfc-0ec7601c9345-kolla-config\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.194958 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9b61338-8aac-4f36-9bfc-0ec7601c9345-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.200002 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c9b61338-8aac-4f36-9bfc-0ec7601c9345-config-data-default\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.203304 
4765 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.203369 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-079dbd25-5e27-448c-83ba-3180683ce108\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-079dbd25-5e27-448c-83ba-3180683ce108\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4e83ac0595aa258daf9ca1fbd55c60a9ad9a15fd924db99cdf77910d2f71f774/globalmount\"" pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.219926 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9b61338-8aac-4f36-9bfc-0ec7601c9345-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.226164 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b61338-8aac-4f36-9bfc-0ec7601c9345-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.231613 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mwk6\" (UniqueName: \"kubernetes.io/projected/c9b61338-8aac-4f36-9bfc-0ec7601c9345-kube-api-access-5mwk6\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.306052 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-079dbd25-5e27-448c-83ba-3180683ce108\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-079dbd25-5e27-448c-83ba-3180683ce108\") pod \"openstack-galera-0\" (UID: \"c9b61338-8aac-4f36-9bfc-0ec7601c9345\") " pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.333245 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.334283 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.337355 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-wrmgj" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.345931 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.356463 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.398412 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0866a219-924f-427d-98a9-2b490ca24c9c-config-data\") pod \"memcached-0\" (UID: \"0866a219-924f-427d-98a9-2b490ca24c9c\") " pod="openstack/memcached-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.398467 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xc5nd\" (UniqueName: \"kubernetes.io/projected/0866a219-924f-427d-98a9-2b490ca24c9c-kube-api-access-xc5nd\") pod \"memcached-0\" (UID: \"0866a219-924f-427d-98a9-2b490ca24c9c\") " pod="openstack/memcached-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.398500 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0866a219-924f-427d-98a9-2b490ca24c9c-kolla-config\") pod \"memcached-0\" (UID: \"0866a219-924f-427d-98a9-2b490ca24c9c\") " pod="openstack/memcached-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.500286 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0866a219-924f-427d-98a9-2b490ca24c9c-config-data\") pod \"memcached-0\" (UID: \"0866a219-924f-427d-98a9-2b490ca24c9c\") " pod="openstack/memcached-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.500378 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xc5nd\" (UniqueName: \"kubernetes.io/projected/0866a219-924f-427d-98a9-2b490ca24c9c-kube-api-access-xc5nd\") pod \"memcached-0\" (UID: \"0866a219-924f-427d-98a9-2b490ca24c9c\") " pod="openstack/memcached-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.500483 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0866a219-924f-427d-98a9-2b490ca24c9c-kolla-config\") pod \"memcached-0\" (UID: \"0866a219-924f-427d-98a9-2b490ca24c9c\") " pod="openstack/memcached-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.501464 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0866a219-924f-427d-98a9-2b490ca24c9c-kolla-config\") pod \"memcached-0\" (UID: \"0866a219-924f-427d-98a9-2b490ca24c9c\") " pod="openstack/memcached-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.501527 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0866a219-924f-427d-98a9-2b490ca24c9c-config-data\") pod \"memcached-0\" (UID: \"0866a219-924f-427d-98a9-2b490ca24c9c\") " pod="openstack/memcached-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.528551 4765 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-xc5nd\" (UniqueName: \"kubernetes.io/projected/0866a219-924f-427d-98a9-2b490ca24c9c-kube-api-access-xc5nd\") pod \"memcached-0\" (UID: \"0866a219-924f-427d-98a9-2b490ca24c9c\") " pod="openstack/memcached-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.539987 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 10 08:00:31 crc kubenswrapper[4765]: I1210 08:00:31.673239 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.085829 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.215803 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.285320 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.287215 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.291783 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.291920 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.292753 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.292896 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-csbc5" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.295029 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.318767 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.318851 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.318878 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2f3d7ab1-5858-4ef2-b5ae-a3e309cd1159\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f3d7ab1-5858-4ef2-b5ae-a3e309cd1159\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.318893 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.319000 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdr2f\" (UniqueName: \"kubernetes.io/projected/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-kube-api-access-zdr2f\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.319077 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.319256 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.319378 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: W1210 08:00:32.375882 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9b61338_8aac_4f36_9bfc_0ec7601c9345.slice/crio-1e635b7e491f8934376a621aa09921257f253ef068cf46b433704194ef3f159d WatchSource:0}: Error finding container 1e635b7e491f8934376a621aa09921257f253ef068cf46b433704194ef3f159d: Status 404 returned error can't find the container with id 1e635b7e491f8934376a621aa09921257f253ef068cf46b433704194ef3f159d Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.430328 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdr2f\" (UniqueName: \"kubernetes.io/projected/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-kube-api-access-zdr2f\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.430410 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.430450 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " 
pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.432173 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.432339 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.432384 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.432408 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2f3d7ab1-5858-4ef2-b5ae-a3e309cd1159\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f3d7ab1-5858-4ef2-b5ae-a3e309cd1159\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.432427 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.433524 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.433560 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.434959 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.435691 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc 
kubenswrapper[4765]: I1210 08:00:32.436169 4765 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.436209 4765 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2f3d7ab1-5858-4ef2-b5ae-a3e309cd1159\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f3d7ab1-5858-4ef2-b5ae-a3e309cd1159\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a7aa790ed5b68cbdcd14f4eb29e0fe646c772b3d8f6971f5df2768ee21472da3/globalmount\"" pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.438885 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.438959 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.475156 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2f3d7ab1-5858-4ef2-b5ae-a3e309cd1159\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f3d7ab1-5858-4ef2-b5ae-a3e309cd1159\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.482037 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdr2f\" (UniqueName: \"kubernetes.io/projected/97649c65-e9e3-45cb-9570-2dd2cb9bc1b3-kube-api-access-zdr2f\") pod \"openstack-cell1-galera-0\" (UID: \"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3\") " pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:32 crc kubenswrapper[4765]: I1210 08:00:32.624357 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 10 08:00:33 crc kubenswrapper[4765]: I1210 08:00:33.083173 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 08:00:33 crc kubenswrapper[4765]: I1210 08:00:33.140777 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"0866a219-924f-427d-98a9-2b490ca24c9c","Type":"ContainerStarted","Data":"cb86e601e39b0f887cb12b9137b962ed28e3d5be6fbb3c3934fe9d743de5d6b1"} Dec 10 08:00:33 crc kubenswrapper[4765]: I1210 08:00:33.142773 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3","Type":"ContainerStarted","Data":"49846f07c9b05397dad0376a7de3f7f1eb34c220f17b3043d1696f8650430e0f"} Dec 10 08:00:33 crc kubenswrapper[4765]: I1210 08:00:33.144343 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c9b61338-8aac-4f36-9bfc-0ec7601c9345","Type":"ContainerStarted","Data":"1e635b7e491f8934376a621aa09921257f253ef068cf46b433704194ef3f159d"} Dec 10 08:00:34 crc kubenswrapper[4765]: I1210 08:00:34.049513 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 08:00:34 crc kubenswrapper[4765]: I1210 08:00:34.049646 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 08:00:34 crc kubenswrapper[4765]: I1210 08:00:34.049726 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 08:00:34 crc kubenswrapper[4765]: I1210 08:00:34.050879 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 08:00:34 crc kubenswrapper[4765]: I1210 08:00:34.051015 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" gracePeriod=600 Dec 10 08:00:35 crc kubenswrapper[4765]: I1210 08:00:35.169610 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" exitCode=0 Dec 10 08:00:35 crc kubenswrapper[4765]: I1210 08:00:35.169976 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286"} Dec 10 08:00:35 crc kubenswrapper[4765]: 
I1210 08:00:35.170026 4765 scope.go:117] "RemoveContainer" containerID="fb69862d770b3a99e6d1672526b9cfd663259d0384b956ca066d13aec349d21b" Dec 10 08:00:35 crc kubenswrapper[4765]: E1210 08:00:35.284823 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:00:36 crc kubenswrapper[4765]: I1210 08:00:36.181449 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:00:36 crc kubenswrapper[4765]: E1210 08:00:36.181780 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:00:48 crc kubenswrapper[4765]: I1210 08:00:48.593394 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:00:48 crc kubenswrapper[4765]: E1210 08:00:48.601645 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.222661 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l84j8"] Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.240562 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l84j8"] Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.241296 4765 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.300201 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-catalog-content\") pod \"redhat-operators-l84j8\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.300592 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jzqm\" (UniqueName: \"kubernetes.io/projected/bd324638-c97a-4e72-bab7-6973a240af1c-kube-api-access-8jzqm\") pod \"redhat-operators-l84j8\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.300630 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-utilities\") pod \"redhat-operators-l84j8\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.402140 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jzqm\" (UniqueName: \"kubernetes.io/projected/bd324638-c97a-4e72-bab7-6973a240af1c-kube-api-access-8jzqm\") pod \"redhat-operators-l84j8\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.402226 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-utilities\") pod \"redhat-operators-l84j8\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.402356 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-catalog-content\") pod \"redhat-operators-l84j8\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.402893 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-catalog-content\") pod \"redhat-operators-l84j8\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.405353 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-utilities\") pod \"redhat-operators-l84j8\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.441965 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jzqm\" (UniqueName: \"kubernetes.io/projected/bd324638-c97a-4e72-bab7-6973a240af1c-kube-api-access-8jzqm\") pod \"redhat-operators-l84j8\" (UID: 
\"bd324638-c97a-4e72-bab7-6973a240af1c\") " pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:54 crc kubenswrapper[4765]: I1210 08:00:54.574572 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:00:55 crc kubenswrapper[4765]: E1210 08:00:55.590401 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:ac695e0386fab75eca56b7765773b9c5" Dec 10 08:00:55 crc kubenswrapper[4765]: E1210 08:00:55.590941 4765 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:ac695e0386fab75eca56b7765773b9c5" Dec 10 08:00:55 crc kubenswrapper[4765]: E1210 08:00:55.591190 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:ac695e0386fab75eca56b7765773b9c5,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n564h564h676h699hcdh67bh66hfdh569h545h648h94h546h696h668h89h96h667h575h595h5d9h584h8dhbdh697h54bhb7h58fh5c9hd8h5cdh5c7q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dwfb8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7c99fb6b65-knzvz_openstack(a8e07587-c542-491a-bef5-c968726115c4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 08:00:55 crc kubenswrapper[4765]: E1210 08:00:55.592415 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = 
copying config: context canceled\"" pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" podUID="a8e07587-c542-491a-bef5-c968726115c4" Dec 10 08:00:55 crc kubenswrapper[4765]: E1210 08:00:55.625380 4765 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:ac695e0386fab75eca56b7765773b9c5" Dec 10 08:00:55 crc kubenswrapper[4765]: E1210 08:00:55.625459 4765 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:ac695e0386fab75eca56b7765773b9c5" Dec 10 08:00:55 crc kubenswrapper[4765]: E1210 08:00:55.625613 4765 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:ac695e0386fab75eca56b7765773b9c5,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8chc6h5bh56fh546hb7hc8h67h5bchffh577h697h5b5h5bdh59bhf6hf4h558hb5h578h595h5cchfbh644h59ch7fh654h547h587h5cbh5d5h8fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8g9f9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7cddbb9455-26jjv_openstack(32421dc9-d4ec-48eb-bd12-ab876ea9e40d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 08:00:55 crc kubenswrapper[4765]: E1210 08:00:55.626865 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" podUID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" Dec 10 08:00:55 crc 
kubenswrapper[4765]: I1210 08:00:55.972317 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l84j8"] Dec 10 08:00:55 crc kubenswrapper[4765]: W1210 08:00:55.975546 4765 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd324638_c97a_4e72_bab7_6973a240af1c.slice/crio-c716b70d549fd38ab424a60a478a6b82f4c15e2c33dc874178727318260e9405 WatchSource:0}: Error finding container c716b70d549fd38ab424a60a478a6b82f4c15e2c33dc874178727318260e9405: Status 404 returned error can't find the container with id c716b70d549fd38ab424a60a478a6b82f4c15e2c33dc874178727318260e9405 Dec 10 08:00:56 crc kubenswrapper[4765]: I1210 08:00:56.396071 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c9b61338-8aac-4f36-9bfc-0ec7601c9345","Type":"ContainerStarted","Data":"f1b1a9c847f7b53a3f2faf34ff34a3781f9c87f52a6cab2e2459aa21bda97a9d"} Dec 10 08:00:56 crc kubenswrapper[4765]: I1210 08:00:56.397962 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"0866a219-924f-427d-98a9-2b490ca24c9c","Type":"ContainerStarted","Data":"86c44e7a378e8423a635c094a4dbb9ecc81d30a9a1c6fb497768e5445cf2ec07"} Dec 10 08:00:56 crc kubenswrapper[4765]: I1210 08:00:56.398178 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 10 08:00:56 crc kubenswrapper[4765]: I1210 08:00:56.399688 4765 generic.go:334] "Generic (PLEG): container finished" podID="bd324638-c97a-4e72-bab7-6973a240af1c" containerID="60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351" exitCode=0 Dec 10 08:00:56 crc kubenswrapper[4765]: I1210 08:00:56.399870 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l84j8" event={"ID":"bd324638-c97a-4e72-bab7-6973a240af1c","Type":"ContainerDied","Data":"60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351"} Dec 10 08:00:56 crc kubenswrapper[4765]: I1210 08:00:56.399923 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l84j8" event={"ID":"bd324638-c97a-4e72-bab7-6973a240af1c","Type":"ContainerStarted","Data":"c716b70d549fd38ab424a60a478a6b82f4c15e2c33dc874178727318260e9405"} Dec 10 08:00:56 crc kubenswrapper[4765]: I1210 08:00:56.401847 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3","Type":"ContainerStarted","Data":"54fb615209c1271ddddb8350ad8b9853f419a6f772f465975b2cff255b7da184"} Dec 10 08:00:56 crc kubenswrapper[4765]: E1210 08:00:56.403260 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:ac695e0386fab75eca56b7765773b9c5\\\"\"" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" podUID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" Dec 10 08:00:56 crc kubenswrapper[4765]: E1210 08:00:56.403322 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:ac695e0386fab75eca56b7765773b9c5\\\"\"" pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" podUID="a8e07587-c542-491a-bef5-c968726115c4" Dec 10 08:00:56 crc kubenswrapper[4765]: I1210 
08:00:56.512736 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.272931059 podStartE2EDuration="25.512715359s" podCreationTimestamp="2025-12-10 08:00:31 +0000 UTC" firstStartedPulling="2025-12-10 08:00:32.386398606 +0000 UTC m=+4352.113063922" lastFinishedPulling="2025-12-10 08:00:55.626182896 +0000 UTC m=+4375.352848222" observedRunningTime="2025-12-10 08:00:56.512002568 +0000 UTC m=+4376.238667884" watchObservedRunningTime="2025-12-10 08:00:56.512715359 +0000 UTC m=+4376.239380675" Dec 10 08:00:57 crc kubenswrapper[4765]: I1210 08:00:57.409604 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46c457c7-93b0-495c-ae8b-dbdc2e8a605b","Type":"ContainerStarted","Data":"c53bbb2149757758adcf65610dcc4bfa1008c8f513fc5caa9681feca96db3870"} Dec 10 08:00:58 crc kubenswrapper[4765]: I1210 08:00:58.419126 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l84j8" event={"ID":"bd324638-c97a-4e72-bab7-6973a240af1c","Type":"ContainerStarted","Data":"4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd"} Dec 10 08:00:59 crc kubenswrapper[4765]: I1210 08:00:59.428680 4765 generic.go:334] "Generic (PLEG): container finished" podID="bd324638-c97a-4e72-bab7-6973a240af1c" containerID="4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd" exitCode=0 Dec 10 08:00:59 crc kubenswrapper[4765]: I1210 08:00:59.428785 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l84j8" event={"ID":"bd324638-c97a-4e72-bab7-6973a240af1c","Type":"ContainerDied","Data":"4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd"} Dec 10 08:00:59 crc kubenswrapper[4765]: I1210 08:00:59.430765 4765 generic.go:334] "Generic (PLEG): container finished" podID="97649c65-e9e3-45cb-9570-2dd2cb9bc1b3" containerID="54fb615209c1271ddddb8350ad8b9853f419a6f772f465975b2cff255b7da184" exitCode=0 Dec 10 08:00:59 crc kubenswrapper[4765]: I1210 08:00:59.430853 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3","Type":"ContainerDied","Data":"54fb615209c1271ddddb8350ad8b9853f419a6f772f465975b2cff255b7da184"} Dec 10 08:00:59 crc kubenswrapper[4765]: I1210 08:00:59.433927 4765 generic.go:334] "Generic (PLEG): container finished" podID="c9b61338-8aac-4f36-9bfc-0ec7601c9345" containerID="f1b1a9c847f7b53a3f2faf34ff34a3781f9c87f52a6cab2e2459aa21bda97a9d" exitCode=0 Dec 10 08:00:59 crc kubenswrapper[4765]: I1210 08:00:59.433986 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c9b61338-8aac-4f36-9bfc-0ec7601c9345","Type":"ContainerDied","Data":"f1b1a9c847f7b53a3f2faf34ff34a3781f9c87f52a6cab2e2459aa21bda97a9d"} Dec 10 08:00:59 crc kubenswrapper[4765]: I1210 08:00:59.589115 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:00:59 crc kubenswrapper[4765]: E1210 08:00:59.589458 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" 
podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:01:00 crc kubenswrapper[4765]: I1210 08:01:00.445595 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c9b61338-8aac-4f36-9bfc-0ec7601c9345","Type":"ContainerStarted","Data":"60bc515f7a70502f0c03286098b4a11e34fdcfad35e7e9f5c2a99a4173b17e0f"} Dec 10 08:01:00 crc kubenswrapper[4765]: I1210 08:01:00.448879 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"97649c65-e9e3-45cb-9570-2dd2cb9bc1b3","Type":"ContainerStarted","Data":"f11867f447526d663a95faa20dd781d5cc3c6e7afdf888ac7fa5fe7fc2510595"} Dec 10 08:01:00 crc kubenswrapper[4765]: I1210 08:01:00.469395 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.291759546 podStartE2EDuration="31.469370454s" podCreationTimestamp="2025-12-10 08:00:29 +0000 UTC" firstStartedPulling="2025-12-10 08:00:32.378711876 +0000 UTC m=+4352.105377192" lastFinishedPulling="2025-12-10 08:00:55.556322784 +0000 UTC m=+4375.282988100" observedRunningTime="2025-12-10 08:01:00.465568376 +0000 UTC m=+4380.192233692" watchObservedRunningTime="2025-12-10 08:01:00.469370454 +0000 UTC m=+4380.196035770" Dec 10 08:01:00 crc kubenswrapper[4765]: I1210 08:01:00.489568 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.048323963 podStartE2EDuration="29.48954195s" podCreationTimestamp="2025-12-10 08:00:31 +0000 UTC" firstStartedPulling="2025-12-10 08:00:33.094268783 +0000 UTC m=+4352.820934089" lastFinishedPulling="2025-12-10 08:00:55.53548676 +0000 UTC m=+4375.262152076" observedRunningTime="2025-12-10 08:01:00.484880767 +0000 UTC m=+4380.211546093" watchObservedRunningTime="2025-12-10 08:01:00.48954195 +0000 UTC m=+4380.216207266" Dec 10 08:01:01 crc kubenswrapper[4765]: I1210 08:01:01.459125 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l84j8" event={"ID":"bd324638-c97a-4e72-bab7-6973a240af1c","Type":"ContainerStarted","Data":"06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd"} Dec 10 08:01:01 crc kubenswrapper[4765]: I1210 08:01:01.482752 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l84j8" podStartSLOduration=3.649614911 podStartE2EDuration="7.482726034s" podCreationTimestamp="2025-12-10 08:00:54 +0000 UTC" firstStartedPulling="2025-12-10 08:00:56.400985542 +0000 UTC m=+4376.127650858" lastFinishedPulling="2025-12-10 08:01:00.234096665 +0000 UTC m=+4379.960761981" observedRunningTime="2025-12-10 08:01:01.475190009 +0000 UTC m=+4381.201855345" watchObservedRunningTime="2025-12-10 08:01:01.482726034 +0000 UTC m=+4381.209391340" Dec 10 08:01:01 crc kubenswrapper[4765]: I1210 08:01:01.540811 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 10 08:01:01 crc kubenswrapper[4765]: I1210 08:01:01.540948 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 10 08:01:01 crc kubenswrapper[4765]: I1210 08:01:01.675315 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 10 08:01:02 crc kubenswrapper[4765]: I1210 08:01:02.624773 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 10 08:01:02 
crc kubenswrapper[4765]: I1210 08:01:02.625161 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 10 08:01:04 crc kubenswrapper[4765]: I1210 08:01:04.575611 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:01:04 crc kubenswrapper[4765]: I1210 08:01:04.575690 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:01:05 crc kubenswrapper[4765]: I1210 08:01:05.633980 4765 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-l84j8" podUID="bd324638-c97a-4e72-bab7-6973a240af1c" containerName="registry-server" probeResult="failure" output=< Dec 10 08:01:05 crc kubenswrapper[4765]: timeout: failed to connect service ":50051" within 1s Dec 10 08:01:05 crc kubenswrapper[4765]: > Dec 10 08:01:08 crc kubenswrapper[4765]: I1210 08:01:08.879615 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 10 08:01:08 crc kubenswrapper[4765]: I1210 08:01:08.956663 4765 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="97649c65-e9e3-45cb-9570-2dd2cb9bc1b3" containerName="galera" probeResult="failure" output=< Dec 10 08:01:08 crc kubenswrapper[4765]: wsrep_local_state_comment (Joined) differs from Synced Dec 10 08:01:08 crc kubenswrapper[4765]: > Dec 10 08:01:12 crc kubenswrapper[4765]: I1210 08:01:12.329643 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 10 08:01:12 crc kubenswrapper[4765]: I1210 08:01:12.410933 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 10 08:01:12 crc kubenswrapper[4765]: I1210 08:01:12.547167 4765 generic.go:334] "Generic (PLEG): container finished" podID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" containerID="041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4" exitCode=0 Dec 10 08:01:12 crc kubenswrapper[4765]: I1210 08:01:12.548202 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" event={"ID":"32421dc9-d4ec-48eb-bd12-ab876ea9e40d","Type":"ContainerDied","Data":"041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4"} Dec 10 08:01:12 crc kubenswrapper[4765]: I1210 08:01:12.590314 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:01:12 crc kubenswrapper[4765]: E1210 08:01:12.590982 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:01:12 crc kubenswrapper[4765]: I1210 08:01:12.752909 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 10 08:01:13 crc kubenswrapper[4765]: I1210 08:01:13.556010 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" 
event={"ID":"32421dc9-d4ec-48eb-bd12-ab876ea9e40d","Type":"ContainerStarted","Data":"6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304"} Dec 10 08:01:13 crc kubenswrapper[4765]: I1210 08:01:13.556326 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:01:13 crc kubenswrapper[4765]: I1210 08:01:13.557506 4765 generic.go:334] "Generic (PLEG): container finished" podID="a8e07587-c542-491a-bef5-c968726115c4" containerID="17a78d802b7254c7c6d4367b291969d2fbfec2edce38b18958475b4edc8cc966" exitCode=0 Dec 10 08:01:13 crc kubenswrapper[4765]: I1210 08:01:13.557539 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" event={"ID":"a8e07587-c542-491a-bef5-c968726115c4","Type":"ContainerDied","Data":"17a78d802b7254c7c6d4367b291969d2fbfec2edce38b18958475b4edc8cc966"} Dec 10 08:01:13 crc kubenswrapper[4765]: I1210 08:01:13.579229 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" podStartSLOduration=2.93458158 podStartE2EDuration="45.578838272s" podCreationTimestamp="2025-12-10 08:00:28 +0000 UTC" firstStartedPulling="2025-12-10 08:00:29.004703505 +0000 UTC m=+4348.731368821" lastFinishedPulling="2025-12-10 08:01:11.648960197 +0000 UTC m=+4391.375625513" observedRunningTime="2025-12-10 08:01:13.571626526 +0000 UTC m=+4393.298291862" watchObservedRunningTime="2025-12-10 08:01:13.578838272 +0000 UTC m=+4393.305503588" Dec 10 08:01:14 crc kubenswrapper[4765]: I1210 08:01:14.568952 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" event={"ID":"a8e07587-c542-491a-bef5-c968726115c4","Type":"ContainerStarted","Data":"99717a3f55b92711be185bb20375ba8777edbaa3dace476596e4cb75d919ca7a"} Dec 10 08:01:14 crc kubenswrapper[4765]: I1210 08:01:14.569671 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:01:14 crc kubenswrapper[4765]: I1210 08:01:14.594972 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" podStartSLOduration=3.495795706 podStartE2EDuration="46.59494456s" podCreationTimestamp="2025-12-10 08:00:28 +0000 UTC" firstStartedPulling="2025-12-10 08:00:29.215232099 +0000 UTC m=+4348.941897415" lastFinishedPulling="2025-12-10 08:01:12.314380953 +0000 UTC m=+4392.041046269" observedRunningTime="2025-12-10 08:01:14.590823272 +0000 UTC m=+4394.317488598" watchObservedRunningTime="2025-12-10 08:01:14.59494456 +0000 UTC m=+4394.321609876" Dec 10 08:01:14 crc kubenswrapper[4765]: I1210 08:01:14.619756 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:01:14 crc kubenswrapper[4765]: I1210 08:01:14.665136 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:01:14 crc kubenswrapper[4765]: I1210 08:01:14.856654 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l84j8"] Dec 10 08:01:16 crc kubenswrapper[4765]: I1210 08:01:16.584878 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l84j8" podUID="bd324638-c97a-4e72-bab7-6973a240af1c" containerName="registry-server" containerID="cri-o://06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd" gracePeriod=2 
Dec 10 08:01:16 crc kubenswrapper[4765]: I1210 08:01:16.995896 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.194738 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-catalog-content\") pod \"bd324638-c97a-4e72-bab7-6973a240af1c\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.194861 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-utilities\") pod \"bd324638-c97a-4e72-bab7-6973a240af1c\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.194917 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jzqm\" (UniqueName: \"kubernetes.io/projected/bd324638-c97a-4e72-bab7-6973a240af1c-kube-api-access-8jzqm\") pod \"bd324638-c97a-4e72-bab7-6973a240af1c\" (UID: \"bd324638-c97a-4e72-bab7-6973a240af1c\") " Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.195808 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-utilities" (OuterVolumeSpecName: "utilities") pod "bd324638-c97a-4e72-bab7-6973a240af1c" (UID: "bd324638-c97a-4e72-bab7-6973a240af1c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.200011 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd324638-c97a-4e72-bab7-6973a240af1c-kube-api-access-8jzqm" (OuterVolumeSpecName: "kube-api-access-8jzqm") pod "bd324638-c97a-4e72-bab7-6973a240af1c" (UID: "bd324638-c97a-4e72-bab7-6973a240af1c"). InnerVolumeSpecName "kube-api-access-8jzqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.296697 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.296741 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jzqm\" (UniqueName: \"kubernetes.io/projected/bd324638-c97a-4e72-bab7-6973a240af1c-kube-api-access-8jzqm\") on node \"crc\" DevicePath \"\"" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.336980 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd324638-c97a-4e72-bab7-6973a240af1c" (UID: "bd324638-c97a-4e72-bab7-6973a240af1c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.398865 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd324638-c97a-4e72-bab7-6973a240af1c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.594759 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l84j8" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.594811 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l84j8" event={"ID":"bd324638-c97a-4e72-bab7-6973a240af1c","Type":"ContainerDied","Data":"06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd"} Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.594870 4765 scope.go:117] "RemoveContainer" containerID="06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.594687 4765 generic.go:334] "Generic (PLEG): container finished" podID="bd324638-c97a-4e72-bab7-6973a240af1c" containerID="06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd" exitCode=0 Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.595039 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l84j8" event={"ID":"bd324638-c97a-4e72-bab7-6973a240af1c","Type":"ContainerDied","Data":"c716b70d549fd38ab424a60a478a6b82f4c15e2c33dc874178727318260e9405"} Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.615956 4765 scope.go:117] "RemoveContainer" containerID="4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.637913 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l84j8"] Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.643381 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l84j8"] Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.657007 4765 scope.go:117] "RemoveContainer" containerID="60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.675436 4765 scope.go:117] "RemoveContainer" containerID="06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd" Dec 10 08:01:17 crc kubenswrapper[4765]: E1210 08:01:17.676121 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd\": container with ID starting with 06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd not found: ID does not exist" containerID="06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.676164 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd"} err="failed to get container status \"06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd\": rpc error: code = NotFound desc = could not find container \"06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd\": container with ID starting with 06ac9b27ec28b2430231f60a13a425823a8709e15054849035097416ddd8edfd not found: ID does not exist" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.676190 4765 scope.go:117] "RemoveContainer" containerID="4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd" Dec 10 08:01:17 crc kubenswrapper[4765]: E1210 08:01:17.676790 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd\": container with ID starting with 
4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd not found: ID does not exist" containerID="4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.676914 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd"} err="failed to get container status \"4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd\": rpc error: code = NotFound desc = could not find container \"4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd\": container with ID starting with 4fa5530b2053ee5418f96700ac4653f3784d5a7498f3e026aa54edd646649ccd not found: ID does not exist" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.676985 4765 scope.go:117] "RemoveContainer" containerID="60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351" Dec 10 08:01:17 crc kubenswrapper[4765]: E1210 08:01:17.677675 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351\": container with ID starting with 60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351 not found: ID does not exist" containerID="60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351" Dec 10 08:01:17 crc kubenswrapper[4765]: I1210 08:01:17.677748 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351"} err="failed to get container status \"60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351\": rpc error: code = NotFound desc = could not find container \"60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351\": container with ID starting with 60ac4e3e047e18074b98ec2bb56f853e281cfbeee3d3eb8463eb0cb1c2249351 not found: ID does not exist" Dec 10 08:01:18 crc kubenswrapper[4765]: I1210 08:01:18.477273 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:01:18 crc kubenswrapper[4765]: I1210 08:01:18.600850 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd324638-c97a-4e72-bab7-6973a240af1c" path="/var/lib/kubelet/pods/bd324638-c97a-4e72-bab7-6973a240af1c/volumes" Dec 10 08:01:18 crc kubenswrapper[4765]: I1210 08:01:18.729203 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c99fb6b65-knzvz" Dec 10 08:01:18 crc kubenswrapper[4765]: I1210 08:01:18.799821 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cddbb9455-26jjv"] Dec 10 08:01:18 crc kubenswrapper[4765]: I1210 08:01:18.803386 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" podUID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" containerName="dnsmasq-dns" containerID="cri-o://6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304" gracePeriod=10 Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.226994 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.332066 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8g9f9\" (UniqueName: \"kubernetes.io/projected/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-kube-api-access-8g9f9\") pod \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.332253 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-dns-svc\") pod \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.332516 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-config\") pod \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\" (UID: \"32421dc9-d4ec-48eb-bd12-ab876ea9e40d\") " Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.339604 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-kube-api-access-8g9f9" (OuterVolumeSpecName: "kube-api-access-8g9f9") pod "32421dc9-d4ec-48eb-bd12-ab876ea9e40d" (UID: "32421dc9-d4ec-48eb-bd12-ab876ea9e40d"). InnerVolumeSpecName "kube-api-access-8g9f9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.372711 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-config" (OuterVolumeSpecName: "config") pod "32421dc9-d4ec-48eb-bd12-ab876ea9e40d" (UID: "32421dc9-d4ec-48eb-bd12-ab876ea9e40d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.375376 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "32421dc9-d4ec-48eb-bd12-ab876ea9e40d" (UID: "32421dc9-d4ec-48eb-bd12-ab876ea9e40d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.434209 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8g9f9\" (UniqueName: \"kubernetes.io/projected/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-kube-api-access-8g9f9\") on node \"crc\" DevicePath \"\"" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.434265 4765 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.434277 4765 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32421dc9-d4ec-48eb-bd12-ab876ea9e40d-config\") on node \"crc\" DevicePath \"\"" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.615384 4765 generic.go:334] "Generic (PLEG): container finished" podID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" containerID="6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304" exitCode=0 Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.615447 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" event={"ID":"32421dc9-d4ec-48eb-bd12-ab876ea9e40d","Type":"ContainerDied","Data":"6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304"} Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.615506 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.615542 4765 scope.go:117] "RemoveContainer" containerID="6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.615525 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cddbb9455-26jjv" event={"ID":"32421dc9-d4ec-48eb-bd12-ab876ea9e40d","Type":"ContainerDied","Data":"f5525e39c96c32edee1d2e8e14b4f0988550f1a5aaea6d3f99fb61a2f126b394"} Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.639198 4765 scope.go:117] "RemoveContainer" containerID="041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.649544 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cddbb9455-26jjv"] Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.656873 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cddbb9455-26jjv"] Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.676407 4765 scope.go:117] "RemoveContainer" containerID="6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304" Dec 10 08:01:19 crc kubenswrapper[4765]: E1210 08:01:19.676960 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304\": container with ID starting with 6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304 not found: ID does not exist" containerID="6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.676997 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304"} err="failed to get container status 
\"6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304\": rpc error: code = NotFound desc = could not find container \"6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304\": container with ID starting with 6817aadb02c1e8dc3392db718c67654ce329b8eb6a5b898637bd6591a3a00304 not found: ID does not exist" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.677019 4765 scope.go:117] "RemoveContainer" containerID="041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4" Dec 10 08:01:19 crc kubenswrapper[4765]: E1210 08:01:19.677347 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4\": container with ID starting with 041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4 not found: ID does not exist" containerID="041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4" Dec 10 08:01:19 crc kubenswrapper[4765]: I1210 08:01:19.677423 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4"} err="failed to get container status \"041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4\": rpc error: code = NotFound desc = could not find container \"041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4\": container with ID starting with 041f22b5e7cb2d8d9f4e36994d3f9514f7bfed19de4665ef48f1df453cc3ecd4 not found: ID does not exist" Dec 10 08:01:20 crc kubenswrapper[4765]: I1210 08:01:20.603283 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" path="/var/lib/kubelet/pods/32421dc9-d4ec-48eb-bd12-ab876ea9e40d/volumes" Dec 10 08:01:27 crc kubenswrapper[4765]: I1210 08:01:27.589633 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:01:27 crc kubenswrapper[4765]: E1210 08:01:27.590602 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:01:29 crc kubenswrapper[4765]: I1210 08:01:29.699990 4765 generic.go:334] "Generic (PLEG): container finished" podID="46c457c7-93b0-495c-ae8b-dbdc2e8a605b" containerID="c53bbb2149757758adcf65610dcc4bfa1008c8f513fc5caa9681feca96db3870" exitCode=0 Dec 10 08:01:29 crc kubenswrapper[4765]: I1210 08:01:29.700313 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46c457c7-93b0-495c-ae8b-dbdc2e8a605b","Type":"ContainerDied","Data":"c53bbb2149757758adcf65610dcc4bfa1008c8f513fc5caa9681feca96db3870"} Dec 10 08:01:30 crc kubenswrapper[4765]: I1210 08:01:30.712280 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"46c457c7-93b0-495c-ae8b-dbdc2e8a605b","Type":"ContainerStarted","Data":"1b14ab8e88956ad81503b61a432daf530fabad15ac2b739e9cc2f2ee3a427faf"} Dec 10 08:01:30 crc kubenswrapper[4765]: I1210 08:01:30.712693 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 10 08:01:30 crc kubenswrapper[4765]: 
I1210 08:01:30.737691 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.305768109 podStartE2EDuration="1m2.737668157s" podCreationTimestamp="2025-12-10 08:00:28 +0000 UTC" firstStartedPulling="2025-12-10 08:00:30.103588532 +0000 UTC m=+4349.830253848" lastFinishedPulling="2025-12-10 08:00:55.53548858 +0000 UTC m=+4375.262153896" observedRunningTime="2025-12-10 08:01:30.734233349 +0000 UTC m=+4410.460898685" watchObservedRunningTime="2025-12-10 08:01:30.737668157 +0000 UTC m=+4410.464333473" Dec 10 08:01:39 crc kubenswrapper[4765]: I1210 08:01:39.589494 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:01:39 crc kubenswrapper[4765]: E1210 08:01:39.590556 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:01:39 crc kubenswrapper[4765]: I1210 08:01:39.615853 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 10 08:01:50 crc kubenswrapper[4765]: I1210 08:01:50.595327 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:01:50 crc kubenswrapper[4765]: E1210 08:01:50.595899 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:02:04 crc kubenswrapper[4765]: I1210 08:02:04.590342 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:02:04 crc kubenswrapper[4765]: E1210 08:02:04.591732 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:02:17 crc kubenswrapper[4765]: I1210 08:02:17.590066 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:02:17 crc kubenswrapper[4765]: E1210 08:02:17.596818 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:02:29 crc kubenswrapper[4765]: I1210 08:02:29.589895 4765 scope.go:117] "RemoveContainer" 
containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:02:29 crc kubenswrapper[4765]: E1210 08:02:29.590651 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:02:42 crc kubenswrapper[4765]: I1210 08:02:42.590256 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:02:42 crc kubenswrapper[4765]: E1210 08:02:42.591677 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:02:54 crc kubenswrapper[4765]: I1210 08:02:54.589645 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:02:54 crc kubenswrapper[4765]: E1210 08:02:54.590510 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:03:05 crc kubenswrapper[4765]: I1210 08:03:05.589105 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:03:05 crc kubenswrapper[4765]: E1210 08:03:05.589823 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:03:17 crc kubenswrapper[4765]: I1210 08:03:17.589004 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:03:17 crc kubenswrapper[4765]: E1210 08:03:17.589958 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:03:30 crc kubenswrapper[4765]: I1210 08:03:30.432900 4765 scope.go:117] "RemoveContainer" containerID="415bcb3c819b3f2535d669966ffbbbde31704289626fef72b7e8d1a12198e87a" Dec 10 08:03:31 crc kubenswrapper[4765]: I1210 08:03:31.589728 4765 scope.go:117] "RemoveContainer" 
containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:03:31 crc kubenswrapper[4765]: E1210 08:03:31.590365 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:03:45 crc kubenswrapper[4765]: I1210 08:03:45.589301 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:03:45 crc kubenswrapper[4765]: E1210 08:03:45.590119 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:03:58 crc kubenswrapper[4765]: I1210 08:03:58.589552 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:03:58 crc kubenswrapper[4765]: E1210 08:03:58.590333 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:04:12 crc kubenswrapper[4765]: I1210 08:04:12.589154 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:04:12 crc kubenswrapper[4765]: E1210 08:04:12.589893 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:04:23 crc kubenswrapper[4765]: I1210 08:04:23.588642 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:04:23 crc kubenswrapper[4765]: E1210 08:04:23.590886 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:04:35 crc kubenswrapper[4765]: I1210 08:04:35.588992 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:04:35 crc kubenswrapper[4765]: E1210 08:04:35.589912 4765 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:04:48 crc kubenswrapper[4765]: I1210 08:04:48.589964 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:04:48 crc kubenswrapper[4765]: E1210 08:04:48.590816 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:05:03 crc kubenswrapper[4765]: I1210 08:05:03.589625 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:05:03 crc kubenswrapper[4765]: E1210 08:05:03.590439 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.021910 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kjqwc"] Dec 10 08:05:04 crc kubenswrapper[4765]: E1210 08:05:04.022316 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" containerName="dnsmasq-dns" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.022339 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" containerName="dnsmasq-dns" Dec 10 08:05:04 crc kubenswrapper[4765]: E1210 08:05:04.022356 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd324638-c97a-4e72-bab7-6973a240af1c" containerName="registry-server" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.022364 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd324638-c97a-4e72-bab7-6973a240af1c" containerName="registry-server" Dec 10 08:05:04 crc kubenswrapper[4765]: E1210 08:05:04.022401 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd324638-c97a-4e72-bab7-6973a240af1c" containerName="extract-utilities" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.022413 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd324638-c97a-4e72-bab7-6973a240af1c" containerName="extract-utilities" Dec 10 08:05:04 crc kubenswrapper[4765]: E1210 08:05:04.022426 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd324638-c97a-4e72-bab7-6973a240af1c" containerName="extract-content" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.022433 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd324638-c97a-4e72-bab7-6973a240af1c" containerName="extract-content" Dec 10 08:05:04 crc kubenswrapper[4765]: E1210 08:05:04.022443 4765 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" containerName="init" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.022450 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" containerName="init" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.023170 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd324638-c97a-4e72-bab7-6973a240af1c" containerName="registry-server" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.023203 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="32421dc9-d4ec-48eb-bd12-ab876ea9e40d" containerName="dnsmasq-dns" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.026242 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.041381 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kjqwc"] Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.193227 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-utilities\") pod \"certified-operators-kjqwc\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.193302 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bblt\" (UniqueName: \"kubernetes.io/projected/54674f39-c915-49d6-8c76-eb158e67f584-kube-api-access-4bblt\") pod \"certified-operators-kjqwc\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.193765 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-catalog-content\") pod \"certified-operators-kjqwc\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.295147 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-utilities\") pod \"certified-operators-kjqwc\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.295219 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bblt\" (UniqueName: \"kubernetes.io/projected/54674f39-c915-49d6-8c76-eb158e67f584-kube-api-access-4bblt\") pod \"certified-operators-kjqwc\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.295296 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-catalog-content\") pod \"certified-operators-kjqwc\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: 
I1210 08:05:04.295767 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-catalog-content\") pod \"certified-operators-kjqwc\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.296039 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-utilities\") pod \"certified-operators-kjqwc\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.316622 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bblt\" (UniqueName: \"kubernetes.io/projected/54674f39-c915-49d6-8c76-eb158e67f584-kube-api-access-4bblt\") pod \"certified-operators-kjqwc\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.353810 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:04 crc kubenswrapper[4765]: I1210 08:05:04.907136 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kjqwc"] Dec 10 08:05:05 crc kubenswrapper[4765]: I1210 08:05:05.432850 4765 generic.go:334] "Generic (PLEG): container finished" podID="54674f39-c915-49d6-8c76-eb158e67f584" containerID="5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1" exitCode=0 Dec 10 08:05:05 crc kubenswrapper[4765]: I1210 08:05:05.433028 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjqwc" event={"ID":"54674f39-c915-49d6-8c76-eb158e67f584","Type":"ContainerDied","Data":"5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1"} Dec 10 08:05:05 crc kubenswrapper[4765]: I1210 08:05:05.433243 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjqwc" event={"ID":"54674f39-c915-49d6-8c76-eb158e67f584","Type":"ContainerStarted","Data":"cfe4b054bbb5b8ff146569fb5613ab208abefbd7de139cfa2db25a5d30506539"} Dec 10 08:05:05 crc kubenswrapper[4765]: I1210 08:05:05.435425 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 08:05:06 crc kubenswrapper[4765]: I1210 08:05:06.441334 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjqwc" event={"ID":"54674f39-c915-49d6-8c76-eb158e67f584","Type":"ContainerStarted","Data":"befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8"} Dec 10 08:05:07 crc kubenswrapper[4765]: I1210 08:05:07.455928 4765 generic.go:334] "Generic (PLEG): container finished" podID="54674f39-c915-49d6-8c76-eb158e67f584" containerID="befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8" exitCode=0 Dec 10 08:05:07 crc kubenswrapper[4765]: I1210 08:05:07.456028 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjqwc" event={"ID":"54674f39-c915-49d6-8c76-eb158e67f584","Type":"ContainerDied","Data":"befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8"} Dec 10 08:05:09 crc kubenswrapper[4765]: I1210 08:05:09.475403 4765 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjqwc" event={"ID":"54674f39-c915-49d6-8c76-eb158e67f584","Type":"ContainerStarted","Data":"16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d"} Dec 10 08:05:09 crc kubenswrapper[4765]: I1210 08:05:09.497484 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kjqwc" podStartSLOduration=2.394325295 podStartE2EDuration="5.497459981s" podCreationTimestamp="2025-12-10 08:05:04 +0000 UTC" firstStartedPulling="2025-12-10 08:05:05.435049899 +0000 UTC m=+4625.161715215" lastFinishedPulling="2025-12-10 08:05:08.538184585 +0000 UTC m=+4628.264849901" observedRunningTime="2025-12-10 08:05:09.49041624 +0000 UTC m=+4629.217081556" watchObservedRunningTime="2025-12-10 08:05:09.497459981 +0000 UTC m=+4629.224125297" Dec 10 08:05:14 crc kubenswrapper[4765]: I1210 08:05:14.354678 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:14 crc kubenswrapper[4765]: I1210 08:05:14.355176 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:14 crc kubenswrapper[4765]: I1210 08:05:14.402604 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:14 crc kubenswrapper[4765]: I1210 08:05:14.548191 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:17 crc kubenswrapper[4765]: I1210 08:05:17.589649 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:05:17 crc kubenswrapper[4765]: E1210 08:05:17.590427 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:05:17 crc kubenswrapper[4765]: I1210 08:05:17.807432 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kjqwc"] Dec 10 08:05:17 crc kubenswrapper[4765]: I1210 08:05:17.807673 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kjqwc" podUID="54674f39-c915-49d6-8c76-eb158e67f584" containerName="registry-server" containerID="cri-o://16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d" gracePeriod=2 Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.327513 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.452675 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-catalog-content\") pod \"54674f39-c915-49d6-8c76-eb158e67f584\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.452820 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bblt\" (UniqueName: \"kubernetes.io/projected/54674f39-c915-49d6-8c76-eb158e67f584-kube-api-access-4bblt\") pod \"54674f39-c915-49d6-8c76-eb158e67f584\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.452900 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-utilities\") pod \"54674f39-c915-49d6-8c76-eb158e67f584\" (UID: \"54674f39-c915-49d6-8c76-eb158e67f584\") " Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.454419 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-utilities" (OuterVolumeSpecName: "utilities") pod "54674f39-c915-49d6-8c76-eb158e67f584" (UID: "54674f39-c915-49d6-8c76-eb158e67f584"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.458939 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54674f39-c915-49d6-8c76-eb158e67f584-kube-api-access-4bblt" (OuterVolumeSpecName: "kube-api-access-4bblt") pod "54674f39-c915-49d6-8c76-eb158e67f584" (UID: "54674f39-c915-49d6-8c76-eb158e67f584"). InnerVolumeSpecName "kube-api-access-4bblt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.510292 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "54674f39-c915-49d6-8c76-eb158e67f584" (UID: "54674f39-c915-49d6-8c76-eb158e67f584"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.551509 4765 generic.go:334] "Generic (PLEG): container finished" podID="54674f39-c915-49d6-8c76-eb158e67f584" containerID="16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d" exitCode=0 Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.551569 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjqwc" event={"ID":"54674f39-c915-49d6-8c76-eb158e67f584","Type":"ContainerDied","Data":"16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d"} Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.551605 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kjqwc" event={"ID":"54674f39-c915-49d6-8c76-eb158e67f584","Type":"ContainerDied","Data":"cfe4b054bbb5b8ff146569fb5613ab208abefbd7de139cfa2db25a5d30506539"} Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.551604 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kjqwc" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.551626 4765 scope.go:117] "RemoveContainer" containerID="16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.555379 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.555409 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bblt\" (UniqueName: \"kubernetes.io/projected/54674f39-c915-49d6-8c76-eb158e67f584-kube-api-access-4bblt\") on node \"crc\" DevicePath \"\"" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.555420 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54674f39-c915-49d6-8c76-eb158e67f584-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.579030 4765 scope.go:117] "RemoveContainer" containerID="befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.602029 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kjqwc"] Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.612297 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kjqwc"] Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.615821 4765 scope.go:117] "RemoveContainer" containerID="5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.638644 4765 scope.go:117] "RemoveContainer" containerID="16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d" Dec 10 08:05:19 crc kubenswrapper[4765]: E1210 08:05:19.639190 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d\": container with ID starting with 16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d not found: ID does not exist" containerID="16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.639227 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d"} err="failed to get container status \"16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d\": rpc error: code = NotFound desc = could not find container \"16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d\": container with ID starting with 16eab5f3a718c2d84154516b2700d4852ca6f62c2d51096835ddf412edffb91d not found: ID does not exist" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.639247 4765 scope.go:117] "RemoveContainer" containerID="befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8" Dec 10 08:05:19 crc kubenswrapper[4765]: E1210 08:05:19.639685 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8\": container with ID starting with befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8 not found: 
ID does not exist" containerID="befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.639712 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8"} err="failed to get container status \"befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8\": rpc error: code = NotFound desc = could not find container \"befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8\": container with ID starting with befe7351b30e6e098d84f946044916616c165ef311508c39e6375352a158f8c8 not found: ID does not exist" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.639727 4765 scope.go:117] "RemoveContainer" containerID="5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1" Dec 10 08:05:19 crc kubenswrapper[4765]: E1210 08:05:19.640011 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1\": container with ID starting with 5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1 not found: ID does not exist" containerID="5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1" Dec 10 08:05:19 crc kubenswrapper[4765]: I1210 08:05:19.640037 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1"} err="failed to get container status \"5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1\": rpc error: code = NotFound desc = could not find container \"5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1\": container with ID starting with 5e4e68117a11eb75b69f5d0808c6debdb60913f9bd74bebfad094a40d2fab8e1 not found: ID does not exist" Dec 10 08:05:20 crc kubenswrapper[4765]: I1210 08:05:20.600507 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54674f39-c915-49d6-8c76-eb158e67f584" path="/var/lib/kubelet/pods/54674f39-c915-49d6-8c76-eb158e67f584/volumes" Dec 10 08:05:31 crc kubenswrapper[4765]: I1210 08:05:31.589691 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:05:31 crc kubenswrapper[4765]: E1210 08:05:31.590868 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:05:46 crc kubenswrapper[4765]: I1210 08:05:46.590639 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:05:47 crc kubenswrapper[4765]: I1210 08:05:47.773696 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"36428c9b79bcd275e10b5f0f6d13bce947dacd2ed8442b694ef4b92da1180efd"} Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.253562 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-v29rk/must-gather-zbnvv"] Dec 10 08:06:54 crc 
kubenswrapper[4765]: E1210 08:06:54.254771 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54674f39-c915-49d6-8c76-eb158e67f584" containerName="extract-utilities" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.254791 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="54674f39-c915-49d6-8c76-eb158e67f584" containerName="extract-utilities" Dec 10 08:06:54 crc kubenswrapper[4765]: E1210 08:06:54.254806 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54674f39-c915-49d6-8c76-eb158e67f584" containerName="extract-content" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.254814 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="54674f39-c915-49d6-8c76-eb158e67f584" containerName="extract-content" Dec 10 08:06:54 crc kubenswrapper[4765]: E1210 08:06:54.254846 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54674f39-c915-49d6-8c76-eb158e67f584" containerName="registry-server" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.254854 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="54674f39-c915-49d6-8c76-eb158e67f584" containerName="registry-server" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.255013 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="54674f39-c915-49d6-8c76-eb158e67f584" containerName="registry-server" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.256075 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v29rk/must-gather-zbnvv" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.259777 4765 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-v29rk"/"default-dockercfg-kmrt6" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.259966 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-v29rk"/"kube-root-ca.crt" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.260234 4765 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-v29rk"/"openshift-service-ca.crt" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.264626 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-v29rk/must-gather-zbnvv"] Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.329442 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f5fe2052-71f7-4aa3-a719-de45bd724fb3-must-gather-output\") pod \"must-gather-zbnvv\" (UID: \"f5fe2052-71f7-4aa3-a719-de45bd724fb3\") " pod="openshift-must-gather-v29rk/must-gather-zbnvv" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.329502 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2msd5\" (UniqueName: \"kubernetes.io/projected/f5fe2052-71f7-4aa3-a719-de45bd724fb3-kube-api-access-2msd5\") pod \"must-gather-zbnvv\" (UID: \"f5fe2052-71f7-4aa3-a719-de45bd724fb3\") " pod="openshift-must-gather-v29rk/must-gather-zbnvv" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.431394 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f5fe2052-71f7-4aa3-a719-de45bd724fb3-must-gather-output\") pod \"must-gather-zbnvv\" (UID: \"f5fe2052-71f7-4aa3-a719-de45bd724fb3\") " pod="openshift-must-gather-v29rk/must-gather-zbnvv" Dec 10 08:06:54 crc 
kubenswrapper[4765]: I1210 08:06:54.431471 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2msd5\" (UniqueName: \"kubernetes.io/projected/f5fe2052-71f7-4aa3-a719-de45bd724fb3-kube-api-access-2msd5\") pod \"must-gather-zbnvv\" (UID: \"f5fe2052-71f7-4aa3-a719-de45bd724fb3\") " pod="openshift-must-gather-v29rk/must-gather-zbnvv" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.432126 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f5fe2052-71f7-4aa3-a719-de45bd724fb3-must-gather-output\") pod \"must-gather-zbnvv\" (UID: \"f5fe2052-71f7-4aa3-a719-de45bd724fb3\") " pod="openshift-must-gather-v29rk/must-gather-zbnvv" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.457660 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2msd5\" (UniqueName: \"kubernetes.io/projected/f5fe2052-71f7-4aa3-a719-de45bd724fb3-kube-api-access-2msd5\") pod \"must-gather-zbnvv\" (UID: \"f5fe2052-71f7-4aa3-a719-de45bd724fb3\") " pod="openshift-must-gather-v29rk/must-gather-zbnvv" Dec 10 08:06:54 crc kubenswrapper[4765]: I1210 08:06:54.578598 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v29rk/must-gather-zbnvv" Dec 10 08:06:55 crc kubenswrapper[4765]: I1210 08:06:55.077966 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-v29rk/must-gather-zbnvv"] Dec 10 08:06:55 crc kubenswrapper[4765]: I1210 08:06:55.282713 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v29rk/must-gather-zbnvv" event={"ID":"f5fe2052-71f7-4aa3-a719-de45bd724fb3","Type":"ContainerStarted","Data":"4c81f6e260fa58aa37e9c5dfa4fbcfbb211391e045ac95a20c422aa163655208"} Dec 10 08:07:02 crc kubenswrapper[4765]: I1210 08:07:02.348142 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v29rk/must-gather-zbnvv" event={"ID":"f5fe2052-71f7-4aa3-a719-de45bd724fb3","Type":"ContainerStarted","Data":"2bdc3c8f9e0bb315dcf24204b5cab4779d09a6c34cdd2edd8396b40c148f4354"} Dec 10 08:07:03 crc kubenswrapper[4765]: I1210 08:07:03.360110 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v29rk/must-gather-zbnvv" event={"ID":"f5fe2052-71f7-4aa3-a719-de45bd724fb3","Type":"ContainerStarted","Data":"15e9069211c8ee280609756e12c3b86da9c2242833e50dcd936d0067819cd8a2"} Dec 10 08:07:03 crc kubenswrapper[4765]: I1210 08:07:03.380723 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-v29rk/must-gather-zbnvv" podStartSLOduration=2.546490512 podStartE2EDuration="9.38069777s" podCreationTimestamp="2025-12-10 08:06:54 +0000 UTC" firstStartedPulling="2025-12-10 08:06:55.101591357 +0000 UTC m=+4734.828256663" lastFinishedPulling="2025-12-10 08:07:01.935798605 +0000 UTC m=+4741.662463921" observedRunningTime="2025-12-10 08:07:03.373587147 +0000 UTC m=+4743.100252493" watchObservedRunningTime="2025-12-10 08:07:03.38069777 +0000 UTC m=+4743.107363086" Dec 10 08:07:47 crc kubenswrapper[4765]: I1210 08:07:47.927983 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7c99fb6b65-knzvz_a8e07587-c542-491a-bef5-c968726115c4/init/0.log" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.047524 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tk9z5"] Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 
08:07:48.049363 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.061492 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tk9z5"] Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.154637 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7c99fb6b65-knzvz_a8e07587-c542-491a-bef5-c968726115c4/init/0.log" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.222794 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-catalog-content\") pod \"community-operators-tk9z5\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.222907 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-utilities\") pod \"community-operators-tk9z5\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.222941 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tvmw\" (UniqueName: \"kubernetes.io/projected/eef9ed53-f133-442e-93cc-3a37a1daa6e7-kube-api-access-4tvmw\") pod \"community-operators-tk9z5\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.261551 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7c99fb6b65-knzvz_a8e07587-c542-491a-bef5-c968726115c4/dnsmasq-dns/0.log" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.324321 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-catalog-content\") pod \"community-operators-tk9z5\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.324408 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-utilities\") pod \"community-operators-tk9z5\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.324439 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tvmw\" (UniqueName: \"kubernetes.io/projected/eef9ed53-f133-442e-93cc-3a37a1daa6e7-kube-api-access-4tvmw\") pod \"community-operators-tk9z5\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.324870 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-catalog-content\") pod \"community-operators-tk9z5\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " 
pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.324897 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-utilities\") pod \"community-operators-tk9z5\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.353491 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tvmw\" (UniqueName: \"kubernetes.io/projected/eef9ed53-f133-442e-93cc-3a37a1daa6e7-kube-api-access-4tvmw\") pod \"community-operators-tk9z5\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.370194 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.395245 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_0866a219-924f-427d-98a9-2b490ca24c9c/memcached/0.log" Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.721686 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tk9z5"] Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.800382 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9z5" event={"ID":"eef9ed53-f133-442e-93cc-3a37a1daa6e7","Type":"ContainerStarted","Data":"87ab9f86fe98ae06b39e84e5f7a8614cec3ba2a2daf865bbb350ebde1999b828"} Dec 10 08:07:48 crc kubenswrapper[4765]: I1210 08:07:48.862317 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_97649c65-e9e3-45cb-9570-2dd2cb9bc1b3/mysql-bootstrap/0.log" Dec 10 08:07:49 crc kubenswrapper[4765]: I1210 08:07:49.072976 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_97649c65-e9e3-45cb-9570-2dd2cb9bc1b3/mysql-bootstrap/0.log" Dec 10 08:07:49 crc kubenswrapper[4765]: I1210 08:07:49.119396 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c9b61338-8aac-4f36-9bfc-0ec7601c9345/mysql-bootstrap/0.log" Dec 10 08:07:49 crc kubenswrapper[4765]: I1210 08:07:49.123414 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_97649c65-e9e3-45cb-9570-2dd2cb9bc1b3/galera/0.log" Dec 10 08:07:49 crc kubenswrapper[4765]: I1210 08:07:49.344666 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c9b61338-8aac-4f36-9bfc-0ec7601c9345/galera/0.log" Dec 10 08:07:49 crc kubenswrapper[4765]: I1210 08:07:49.363480 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c9b61338-8aac-4f36-9bfc-0ec7601c9345/mysql-bootstrap/0.log" Dec 10 08:07:49 crc kubenswrapper[4765]: I1210 08:07:49.399660 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_46c457c7-93b0-495c-ae8b-dbdc2e8a605b/setup-container/0.log" Dec 10 08:07:49 crc kubenswrapper[4765]: I1210 08:07:49.578543 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_46c457c7-93b0-495c-ae8b-dbdc2e8a605b/setup-container/0.log" Dec 10 08:07:49 crc kubenswrapper[4765]: I1210 08:07:49.648817 4765 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_rabbitmq-server-0_46c457c7-93b0-495c-ae8b-dbdc2e8a605b/rabbitmq/0.log" Dec 10 08:07:49 crc kubenswrapper[4765]: I1210 08:07:49.809730 4765 generic.go:334] "Generic (PLEG): container finished" podID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" containerID="9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea" exitCode=0 Dec 10 08:07:49 crc kubenswrapper[4765]: I1210 08:07:49.809819 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9z5" event={"ID":"eef9ed53-f133-442e-93cc-3a37a1daa6e7","Type":"ContainerDied","Data":"9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea"} Dec 10 08:07:51 crc kubenswrapper[4765]: I1210 08:07:51.830414 4765 generic.go:334] "Generic (PLEG): container finished" podID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" containerID="04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f" exitCode=0 Dec 10 08:07:51 crc kubenswrapper[4765]: I1210 08:07:51.830532 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9z5" event={"ID":"eef9ed53-f133-442e-93cc-3a37a1daa6e7","Type":"ContainerDied","Data":"04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f"} Dec 10 08:07:53 crc kubenswrapper[4765]: I1210 08:07:53.848541 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9z5" event={"ID":"eef9ed53-f133-442e-93cc-3a37a1daa6e7","Type":"ContainerStarted","Data":"d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b"} Dec 10 08:07:53 crc kubenswrapper[4765]: I1210 08:07:53.871504 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tk9z5" podStartSLOduration=3.000912943 podStartE2EDuration="5.871484316s" podCreationTimestamp="2025-12-10 08:07:48 +0000 UTC" firstStartedPulling="2025-12-10 08:07:49.81171635 +0000 UTC m=+4789.538381666" lastFinishedPulling="2025-12-10 08:07:52.682287723 +0000 UTC m=+4792.408953039" observedRunningTime="2025-12-10 08:07:53.86779319 +0000 UTC m=+4793.594458516" watchObservedRunningTime="2025-12-10 08:07:53.871484316 +0000 UTC m=+4793.598149632" Dec 10 08:07:58 crc kubenswrapper[4765]: I1210 08:07:58.370452 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:58 crc kubenswrapper[4765]: I1210 08:07:58.370939 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:58 crc kubenswrapper[4765]: I1210 08:07:58.417248 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:58 crc kubenswrapper[4765]: I1210 08:07:58.922207 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:07:58 crc kubenswrapper[4765]: I1210 08:07:58.968481 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tk9z5"] Dec 10 08:08:00 crc kubenswrapper[4765]: I1210 08:08:00.896556 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tk9z5" podUID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" containerName="registry-server" containerID="cri-o://d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b" gracePeriod=2 Dec 10 08:08:01 crc 
kubenswrapper[4765]: I1210 08:08:01.834784 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.910553 4765 generic.go:334] "Generic (PLEG): container finished" podID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" containerID="d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b" exitCode=0 Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.910616 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9z5" event={"ID":"eef9ed53-f133-442e-93cc-3a37a1daa6e7","Type":"ContainerDied","Data":"d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b"} Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.910650 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9z5" event={"ID":"eef9ed53-f133-442e-93cc-3a37a1daa6e7","Type":"ContainerDied","Data":"87ab9f86fe98ae06b39e84e5f7a8614cec3ba2a2daf865bbb350ebde1999b828"} Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.910668 4765 scope.go:117] "RemoveContainer" containerID="d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.910879 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tk9z5" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.933207 4765 scope.go:117] "RemoveContainer" containerID="04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.952579 4765 scope.go:117] "RemoveContainer" containerID="9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.984950 4765 scope.go:117] "RemoveContainer" containerID="d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b" Dec 10 08:08:01 crc kubenswrapper[4765]: E1210 08:08:01.985460 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b\": container with ID starting with d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b not found: ID does not exist" containerID="d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.985495 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b"} err="failed to get container status \"d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b\": rpc error: code = NotFound desc = could not find container \"d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b\": container with ID starting with d9920b96fe48040eb0ccd1afebeea1b588a28113244436b3929cd410efd4b51b not found: ID does not exist" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.985520 4765 scope.go:117] "RemoveContainer" containerID="04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f" Dec 10 08:08:01 crc kubenswrapper[4765]: E1210 08:08:01.985933 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f\": container with ID starting with 
04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f not found: ID does not exist" containerID="04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.985957 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f"} err="failed to get container status \"04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f\": rpc error: code = NotFound desc = could not find container \"04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f\": container with ID starting with 04ff999c7a7632eb0faaf34ef5dc8ff93dff18224a30fe0e1bf9ef0a1f4b235f not found: ID does not exist" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.985972 4765 scope.go:117] "RemoveContainer" containerID="9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea" Dec 10 08:08:01 crc kubenswrapper[4765]: E1210 08:08:01.986431 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea\": container with ID starting with 9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea not found: ID does not exist" containerID="9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.986459 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea"} err="failed to get container status \"9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea\": rpc error: code = NotFound desc = could not find container \"9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea\": container with ID starting with 9a5477f8de9dd4d02e94d2046036ba7c07d3d265b6cdabd391d0b8678b2897ea not found: ID does not exist" Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.995923 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tvmw\" (UniqueName: \"kubernetes.io/projected/eef9ed53-f133-442e-93cc-3a37a1daa6e7-kube-api-access-4tvmw\") pod \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.996047 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-utilities\") pod \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.996132 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-catalog-content\") pod \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\" (UID: \"eef9ed53-f133-442e-93cc-3a37a1daa6e7\") " Dec 10 08:08:01 crc kubenswrapper[4765]: I1210 08:08:01.996874 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-utilities" (OuterVolumeSpecName: "utilities") pod "eef9ed53-f133-442e-93cc-3a37a1daa6e7" (UID: "eef9ed53-f133-442e-93cc-3a37a1daa6e7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:08:02 crc kubenswrapper[4765]: I1210 08:08:02.002326 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eef9ed53-f133-442e-93cc-3a37a1daa6e7-kube-api-access-4tvmw" (OuterVolumeSpecName: "kube-api-access-4tvmw") pod "eef9ed53-f133-442e-93cc-3a37a1daa6e7" (UID: "eef9ed53-f133-442e-93cc-3a37a1daa6e7"). InnerVolumeSpecName "kube-api-access-4tvmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 08:08:02 crc kubenswrapper[4765]: I1210 08:08:02.070479 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eef9ed53-f133-442e-93cc-3a37a1daa6e7" (UID: "eef9ed53-f133-442e-93cc-3a37a1daa6e7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:08:02 crc kubenswrapper[4765]: I1210 08:08:02.098140 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 08:08:02 crc kubenswrapper[4765]: I1210 08:08:02.098186 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eef9ed53-f133-442e-93cc-3a37a1daa6e7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 08:08:02 crc kubenswrapper[4765]: I1210 08:08:02.098198 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tvmw\" (UniqueName: \"kubernetes.io/projected/eef9ed53-f133-442e-93cc-3a37a1daa6e7-kube-api-access-4tvmw\") on node \"crc\" DevicePath \"\"" Dec 10 08:08:02 crc kubenswrapper[4765]: I1210 08:08:02.244786 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tk9z5"] Dec 10 08:08:02 crc kubenswrapper[4765]: I1210 08:08:02.251342 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tk9z5"] Dec 10 08:08:02 crc kubenswrapper[4765]: I1210 08:08:02.598752 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" path="/var/lib/kubelet/pods/eef9ed53-f133-442e-93cc-3a37a1daa6e7/volumes" Dec 10 08:08:04 crc kubenswrapper[4765]: I1210 08:08:04.053814 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 08:08:04 crc kubenswrapper[4765]: I1210 08:08:04.054222 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 08:08:04 crc kubenswrapper[4765]: I1210 08:08:04.451642 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57_d6ec8e63-08e7-4e99-94e4-8c14becf803c/util/0.log" Dec 10 08:08:04 crc kubenswrapper[4765]: I1210 08:08:04.638146 4765 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57_d6ec8e63-08e7-4e99-94e4-8c14becf803c/util/0.log" Dec 10 08:08:04 crc kubenswrapper[4765]: I1210 08:08:04.707166 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57_d6ec8e63-08e7-4e99-94e4-8c14becf803c/pull/0.log" Dec 10 08:08:04 crc kubenswrapper[4765]: I1210 08:08:04.718215 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57_d6ec8e63-08e7-4e99-94e4-8c14becf803c/pull/0.log" Dec 10 08:08:04 crc kubenswrapper[4765]: I1210 08:08:04.853623 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57_d6ec8e63-08e7-4e99-94e4-8c14becf803c/util/0.log" Dec 10 08:08:04 crc kubenswrapper[4765]: I1210 08:08:04.907171 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57_d6ec8e63-08e7-4e99-94e4-8c14becf803c/pull/0.log" Dec 10 08:08:04 crc kubenswrapper[4765]: I1210 08:08:04.963772 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a202a8d0d29caf7438df631524a4272602a9619288fa076173728c3fc3r8r57_d6ec8e63-08e7-4e99-94e4-8c14becf803c/extract/0.log" Dec 10 08:08:05 crc kubenswrapper[4765]: I1210 08:08:05.064748 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-mjmkd_f883d81e-6c50-4d92-878d-253a954fcd7a/kube-rbac-proxy/0.log" Dec 10 08:08:05 crc kubenswrapper[4765]: I1210 08:08:05.188738 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-mjmkd_f883d81e-6c50-4d92-878d-253a954fcd7a/manager/0.log" Dec 10 08:08:05 crc kubenswrapper[4765]: I1210 08:08:05.217774 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-kgvfr_1ccf5120-6d6b-49e6-ae2d-08464b3ab398/kube-rbac-proxy/0.log" Dec 10 08:08:05 crc kubenswrapper[4765]: I1210 08:08:05.340650 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-kgvfr_1ccf5120-6d6b-49e6-ae2d-08464b3ab398/manager/0.log" Dec 10 08:08:05 crc kubenswrapper[4765]: I1210 08:08:05.614276 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-zd5jq_98ff262d-fdd9-4e2e-9cd9-4f570716bf02/manager/0.log" Dec 10 08:08:05 crc kubenswrapper[4765]: I1210 08:08:05.618558 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-zd5jq_98ff262d-fdd9-4e2e-9cd9-4f570716bf02/kube-rbac-proxy/0.log" Dec 10 08:08:05 crc kubenswrapper[4765]: I1210 08:08:05.851285 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-9jg4f_87a90f47-7e73-45ff-9f98-93bec3ebe12e/kube-rbac-proxy/0.log" Dec 10 08:08:05 crc kubenswrapper[4765]: I1210 08:08:05.888034 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-9jg4f_87a90f47-7e73-45ff-9f98-93bec3ebe12e/manager/0.log" Dec 10 08:08:05 crc kubenswrapper[4765]: I1210 
08:08:05.960660 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-hbkmm_eb079565-c2aa-4756-b335-df9a6dac3758/kube-rbac-proxy/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.080985 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-hbkmm_eb079565-c2aa-4756-b335-df9a6dac3758/manager/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.150953 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-sfbj7_85f78838-e7c3-4c58-8be9-5cb847b81d6d/kube-rbac-proxy/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.158513 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-sfbj7_85f78838-e7c3-4c58-8be9-5cb847b81d6d/manager/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.298347 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-7f8r9_06b9f74b-2726-43cb-9353-dec4e4a34f01/kube-rbac-proxy/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.504292 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-xqrjv_f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720/kube-rbac-proxy/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.561830 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-7f8r9_06b9f74b-2726-43cb-9353-dec4e4a34f01/manager/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.574898 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-xqrjv_f8c7e1f9-bf7b-4b50-9ee2-b2e73735e720/manager/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.724552 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-25qhw_f9c242b2-1388-431e-8c76-1022426252c7/kube-rbac-proxy/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.788594 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-25qhw_f9c242b2-1388-431e-8c76-1022426252c7/manager/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.940331 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-4j6rt_8d2a8965-e959-4d48-bc75-e91d2c235898/manager/0.log" Dec 10 08:08:06 crc kubenswrapper[4765]: I1210 08:08:06.982315 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-4j6rt_8d2a8965-e959-4d48-bc75-e91d2c235898/kube-rbac-proxy/0.log" Dec 10 08:08:07 crc kubenswrapper[4765]: I1210 08:08:07.057922 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-mtl8t_9b065504-7b79-4bb0-b583-a37953820f14/kube-rbac-proxy/0.log" Dec 10 08:08:07 crc kubenswrapper[4765]: I1210 08:08:07.172501 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-mtl8t_9b065504-7b79-4bb0-b583-a37953820f14/manager/0.log" Dec 10 08:08:07 crc 
kubenswrapper[4765]: I1210 08:08:07.236278 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-wrjkf_44bbc33f-2848-42f4-b8e9-d99ba69ea07b/manager/0.log" Dec 10 08:08:07 crc kubenswrapper[4765]: I1210 08:08:07.250367 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-wrjkf_44bbc33f-2848-42f4-b8e9-d99ba69ea07b/kube-rbac-proxy/0.log" Dec 10 08:08:07 crc kubenswrapper[4765]: I1210 08:08:07.449920 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-767vs_06116776-17f7-42fd-a55c-8965a8932070/kube-rbac-proxy/0.log" Dec 10 08:08:07 crc kubenswrapper[4765]: I1210 08:08:07.553892 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-767vs_06116776-17f7-42fd-a55c-8965a8932070/manager/0.log" Dec 10 08:08:07 crc kubenswrapper[4765]: I1210 08:08:07.678938 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-tdvd4_23fa0793-f896-4059-b33e-fe00ea97dbab/kube-rbac-proxy/0.log" Dec 10 08:08:07 crc kubenswrapper[4765]: I1210 08:08:07.679480 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-tdvd4_23fa0793-f896-4059-b33e-fe00ea97dbab/manager/0.log" Dec 10 08:08:07 crc kubenswrapper[4765]: I1210 08:08:07.792846 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-694d6cfbd628ppx_32ede12b-66f5-42e9-8e6b-77a6e45c3099/kube-rbac-proxy/0.log" Dec 10 08:08:07 crc kubenswrapper[4765]: I1210 08:08:07.922849 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-694d6cfbd628ppx_32ede12b-66f5-42e9-8e6b-77a6e45c3099/manager/0.log" Dec 10 08:08:08 crc kubenswrapper[4765]: I1210 08:08:08.384740 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-f8s54_23a1b29f-1c77-47e7-ab57-d0539cdbdec0/registry-server/0.log" Dec 10 08:08:08 crc kubenswrapper[4765]: I1210 08:08:08.443657 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7979d445b4-h9kx6_76c02d06-2f30-4291-94eb-7009dd061b2a/operator/0.log" Dec 10 08:08:08 crc kubenswrapper[4765]: I1210 08:08:08.671121 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-g5tv6_eb907950-a4b3-4ba5-bea0-3075610995af/kube-rbac-proxy/0.log" Dec 10 08:08:08 crc kubenswrapper[4765]: I1210 08:08:08.761337 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-p286q_ccb3c4ba-9eab-4b59-85a4-e672a4310cf5/kube-rbac-proxy/0.log" Dec 10 08:08:08 crc kubenswrapper[4765]: I1210 08:08:08.777010 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-g5tv6_eb907950-a4b3-4ba5-bea0-3075610995af/manager/0.log" Dec 10 08:08:08 crc kubenswrapper[4765]: I1210 08:08:08.942497 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-p286q_ccb3c4ba-9eab-4b59-85a4-e672a4310cf5/manager/0.log" 
Dec 10 08:08:08 crc kubenswrapper[4765]: I1210 08:08:08.995521 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-668858c49-rtprx_b4131e33-88a7-4b19-8ffc-4029eec86cd3/manager/0.log" Dec 10 08:08:09 crc kubenswrapper[4765]: I1210 08:08:09.046229 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-rg5bf_29a0de10-d351-4d7c-9dfa-38e628ce116d/operator/0.log" Dec 10 08:08:09 crc kubenswrapper[4765]: I1210 08:08:09.291400 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-pwpkv_dbe2661b-7d79-49f0-9d56-5b66f440670f/kube-rbac-proxy/0.log" Dec 10 08:08:09 crc kubenswrapper[4765]: I1210 08:08:09.409285 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-pwpkv_dbe2661b-7d79-49f0-9d56-5b66f440670f/manager/0.log" Dec 10 08:08:09 crc kubenswrapper[4765]: I1210 08:08:09.443798 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-kt2js_9a932c52-665f-4162-8f00-afdc61891dc5/kube-rbac-proxy/0.log" Dec 10 08:08:09 crc kubenswrapper[4765]: I1210 08:08:09.470528 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-kt2js_9a932c52-665f-4162-8f00-afdc61891dc5/manager/0.log" Dec 10 08:08:09 crc kubenswrapper[4765]: I1210 08:08:09.602241 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-w92jp_68b73b9b-6bcb-4d42-a879-36107b59e8a8/manager/0.log" Dec 10 08:08:09 crc kubenswrapper[4765]: I1210 08:08:09.607179 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-w92jp_68b73b9b-6bcb-4d42-a879-36107b59e8a8/kube-rbac-proxy/0.log" Dec 10 08:08:09 crc kubenswrapper[4765]: I1210 08:08:09.675630 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-667bd8d554-dxgfv_b9667d31-f2e0-4172-bdc6-c35854e9f81a/kube-rbac-proxy/0.log" Dec 10 08:08:09 crc kubenswrapper[4765]: I1210 08:08:09.782019 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-667bd8d554-dxgfv_b9667d31-f2e0-4172-bdc6-c35854e9f81a/manager/0.log" Dec 10 08:08:29 crc kubenswrapper[4765]: I1210 08:08:29.737702 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-j692h_bf9a4b55-b2dd-497c-aee6-c2fa241d5b33/control-plane-machine-set-operator/0.log" Dec 10 08:08:29 crc kubenswrapper[4765]: I1210 08:08:29.924395 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9rf4c_e33fd3b0-8406-4675-91fc-ed8b1b3e7cef/kube-rbac-proxy/0.log" Dec 10 08:08:30 crc kubenswrapper[4765]: I1210 08:08:30.022435 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9rf4c_e33fd3b0-8406-4675-91fc-ed8b1b3e7cef/machine-api-operator/0.log" Dec 10 08:08:34 crc kubenswrapper[4765]: I1210 08:08:34.048967 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 08:08:34 crc kubenswrapper[4765]: I1210 08:08:34.049665 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 08:08:42 crc kubenswrapper[4765]: I1210 08:08:42.503186 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-6zdfw_f2b749dd-e42d-417e-ae85-f66e905aba81/cert-manager-controller/0.log" Dec 10 08:08:42 crc kubenswrapper[4765]: I1210 08:08:42.710790 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-8g9t8_b6f56e46-2706-42ef-bfd1-249520fb4036/cert-manager-cainjector/0.log" Dec 10 08:08:42 crc kubenswrapper[4765]: I1210 08:08:42.735065 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-5jbtc_8659183f-8540-44c3-9040-f8b2490375cb/cert-manager-webhook/0.log" Dec 10 08:08:54 crc kubenswrapper[4765]: I1210 08:08:54.412833 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-4vsz5_9ed9df09-dada-4084-8201-d969872b21d7/nmstate-console-plugin/0.log" Dec 10 08:08:54 crc kubenswrapper[4765]: I1210 08:08:54.506152 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-t5v5s_82aebff8-bbbf-404f-a932-e336e4c46383/nmstate-handler/0.log" Dec 10 08:08:54 crc kubenswrapper[4765]: I1210 08:08:54.606210 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-cwjdq_b741a1af-d805-40d7-ad55-af348e32753d/kube-rbac-proxy/0.log" Dec 10 08:08:54 crc kubenswrapper[4765]: I1210 08:08:54.651357 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-cwjdq_b741a1af-d805-40d7-ad55-af348e32753d/nmstate-metrics/0.log" Dec 10 08:08:54 crc kubenswrapper[4765]: I1210 08:08:54.780463 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-pz966_fd1bc451-a918-4dce-89d6-81b1d7950663/nmstate-operator/0.log" Dec 10 08:08:54 crc kubenswrapper[4765]: I1210 08:08:54.852494 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-k2zln_d4218171-d405-4118-9535-737b242c9453/nmstate-webhook/0.log" Dec 10 08:09:04 crc kubenswrapper[4765]: I1210 08:09:04.049067 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 08:09:04 crc kubenswrapper[4765]: I1210 08:09:04.049727 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 08:09:04 crc kubenswrapper[4765]: I1210 08:09:04.049784 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 08:09:04 crc kubenswrapper[4765]: I1210 08:09:04.050607 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"36428c9b79bcd275e10b5f0f6d13bce947dacd2ed8442b694ef4b92da1180efd"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 08:09:04 crc kubenswrapper[4765]: I1210 08:09:04.050673 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://36428c9b79bcd275e10b5f0f6d13bce947dacd2ed8442b694ef4b92da1180efd" gracePeriod=600 Dec 10 08:09:04 crc kubenswrapper[4765]: I1210 08:09:04.403784 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="36428c9b79bcd275e10b5f0f6d13bce947dacd2ed8442b694ef4b92da1180efd" exitCode=0 Dec 10 08:09:04 crc kubenswrapper[4765]: I1210 08:09:04.403840 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"36428c9b79bcd275e10b5f0f6d13bce947dacd2ed8442b694ef4b92da1180efd"} Dec 10 08:09:04 crc kubenswrapper[4765]: I1210 08:09:04.404100 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerStarted","Data":"c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086"} Dec 10 08:09:04 crc kubenswrapper[4765]: I1210 08:09:04.404218 4765 scope.go:117] "RemoveContainer" containerID="eba566723085e1698581628d04a17db700b6fab5490e2261a223ba210d8df286" Dec 10 08:09:08 crc kubenswrapper[4765]: I1210 08:09:08.390137 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-c7l82_e24b600b-dc29-47be-9e85-da8a5ac84860/kube-rbac-proxy/0.log" Dec 10 08:09:08 crc kubenswrapper[4765]: I1210 08:09:08.598570 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-c7l82_e24b600b-dc29-47be-9e85-da8a5ac84860/controller/0.log" Dec 10 08:09:08 crc kubenswrapper[4765]: I1210 08:09:08.616370 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-frr-files/0.log" Dec 10 08:09:08 crc kubenswrapper[4765]: I1210 08:09:08.776297 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-metrics/0.log" Dec 10 08:09:08 crc kubenswrapper[4765]: I1210 08:09:08.801051 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-reloader/0.log" Dec 10 08:09:08 crc kubenswrapper[4765]: I1210 08:09:08.813969 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-frr-files/0.log" Dec 10 08:09:08 crc kubenswrapper[4765]: I1210 08:09:08.814059 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-reloader/0.log" Dec 10 08:09:08 crc 
kubenswrapper[4765]: I1210 08:09:08.986888 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-frr-files/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.007309 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-reloader/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.011599 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-metrics/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.011951 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-metrics/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.190539 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-reloader/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.199534 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-metrics/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.199555 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/cp-frr-files/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.269013 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/controller/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.373129 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/frr-metrics/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.452133 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/kube-rbac-proxy/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.502101 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/kube-rbac-proxy-frr/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.652240 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/reloader/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.761514 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-p2jnh_d9898042-3b0b-4505-bb28-d9baf6939ee1/frr-k8s-webhook-server/0.log" Dec 10 08:09:09 crc kubenswrapper[4765]: I1210 08:09:09.941732 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-b9b79bb5f-zds8b_165a5bae-361a-45b3-8a95-9b121b537acb/manager/0.log" Dec 10 08:09:10 crc kubenswrapper[4765]: I1210 08:09:10.119240 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5576b55846-vxnvw_e7043cbc-6521-4a96-8324-cf79f32ca135/webhook-server/0.log" Dec 10 08:09:10 crc kubenswrapper[4765]: I1210 08:09:10.214396 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-kw6d9_31d5b3e4-ecf0-4eb3-a434-5a27643f60c6/kube-rbac-proxy/0.log" Dec 10 08:09:10 crc kubenswrapper[4765]: I1210 08:09:10.893532 
4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-kw6d9_31d5b3e4-ecf0-4eb3-a434-5a27643f60c6/speaker/0.log" Dec 10 08:09:11 crc kubenswrapper[4765]: I1210 08:09:11.041257 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k8jnb_85f0f1bc-04a6-497e-8781-6be917b5be98/frr/0.log" Dec 10 08:09:22 crc kubenswrapper[4765]: I1210 08:09:22.293359 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s_3c596d4a-2d7a-4297-834e-183a6a272ec5/util/0.log" Dec 10 08:09:22 crc kubenswrapper[4765]: I1210 08:09:22.431246 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s_3c596d4a-2d7a-4297-834e-183a6a272ec5/util/0.log" Dec 10 08:09:22 crc kubenswrapper[4765]: I1210 08:09:22.550571 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s_3c596d4a-2d7a-4297-834e-183a6a272ec5/pull/0.log" Dec 10 08:09:22 crc kubenswrapper[4765]: I1210 08:09:22.553017 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s_3c596d4a-2d7a-4297-834e-183a6a272ec5/pull/0.log" Dec 10 08:09:22 crc kubenswrapper[4765]: I1210 08:09:22.760226 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s_3c596d4a-2d7a-4297-834e-183a6a272ec5/util/0.log" Dec 10 08:09:22 crc kubenswrapper[4765]: I1210 08:09:22.809393 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s_3c596d4a-2d7a-4297-834e-183a6a272ec5/extract/0.log" Dec 10 08:09:22 crc kubenswrapper[4765]: I1210 08:09:22.837995 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931akkv7s_3c596d4a-2d7a-4297-834e-183a6a272ec5/pull/0.log" Dec 10 08:09:22 crc kubenswrapper[4765]: I1210 08:09:22.932578 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl_e54ad43d-17ff-4bcc-b0a2-e328c40c83f5/util/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.161655 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl_e54ad43d-17ff-4bcc-b0a2-e328c40c83f5/util/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.166562 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl_e54ad43d-17ff-4bcc-b0a2-e328c40c83f5/pull/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.192313 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl_e54ad43d-17ff-4bcc-b0a2-e328c40c83f5/pull/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.352398 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl_e54ad43d-17ff-4bcc-b0a2-e328c40c83f5/util/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 
08:09:23.356725 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl_e54ad43d-17ff-4bcc-b0a2-e328c40c83f5/extract/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.370881 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fzvfvl_e54ad43d-17ff-4bcc-b0a2-e328c40c83f5/pull/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.527818 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5_8a6ab63c-576c-47a1-ad2a-346353164954/util/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.746295 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5_8a6ab63c-576c-47a1-ad2a-346353164954/util/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.759529 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5_8a6ab63c-576c-47a1-ad2a-346353164954/pull/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.781565 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5_8a6ab63c-576c-47a1-ad2a-346353164954/pull/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.976588 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5_8a6ab63c-576c-47a1-ad2a-346353164954/pull/0.log" Dec 10 08:09:23 crc kubenswrapper[4765]: I1210 08:09:23.985905 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5_8a6ab63c-576c-47a1-ad2a-346353164954/extract/0.log" Dec 10 08:09:24 crc kubenswrapper[4765]: I1210 08:09:24.013290 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8394lv5_8a6ab63c-576c-47a1-ad2a-346353164954/util/0.log" Dec 10 08:09:24 crc kubenswrapper[4765]: I1210 08:09:24.199353 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-grp4g_2b5f2726-04c7-41c4-ab5c-5eb64062c107/extract-utilities/0.log" Dec 10 08:09:24 crc kubenswrapper[4765]: I1210 08:09:24.388636 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-grp4g_2b5f2726-04c7-41c4-ab5c-5eb64062c107/extract-content/0.log" Dec 10 08:09:24 crc kubenswrapper[4765]: I1210 08:09:24.396330 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-grp4g_2b5f2726-04c7-41c4-ab5c-5eb64062c107/extract-utilities/0.log" Dec 10 08:09:24 crc kubenswrapper[4765]: I1210 08:09:24.399109 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-grp4g_2b5f2726-04c7-41c4-ab5c-5eb64062c107/extract-content/0.log" Dec 10 08:09:24 crc kubenswrapper[4765]: I1210 08:09:24.834467 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-grp4g_2b5f2726-04c7-41c4-ab5c-5eb64062c107/extract-utilities/0.log" Dec 10 08:09:24 crc kubenswrapper[4765]: I1210 
08:09:24.893232 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-grp4g_2b5f2726-04c7-41c4-ab5c-5eb64062c107/extract-content/0.log" Dec 10 08:09:24 crc kubenswrapper[4765]: I1210 08:09:24.969798 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-grp4g_2b5f2726-04c7-41c4-ab5c-5eb64062c107/registry-server/0.log" Dec 10 08:09:25 crc kubenswrapper[4765]: I1210 08:09:25.042885 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-khqgp_3c5d628e-14a2-41d3-9d0b-b13465015fac/extract-utilities/0.log" Dec 10 08:09:25 crc kubenswrapper[4765]: I1210 08:09:25.251436 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-khqgp_3c5d628e-14a2-41d3-9d0b-b13465015fac/extract-utilities/0.log" Dec 10 08:09:25 crc kubenswrapper[4765]: I1210 08:09:25.318793 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-khqgp_3c5d628e-14a2-41d3-9d0b-b13465015fac/extract-content/0.log" Dec 10 08:09:25 crc kubenswrapper[4765]: I1210 08:09:25.326620 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-khqgp_3c5d628e-14a2-41d3-9d0b-b13465015fac/extract-content/0.log" Dec 10 08:09:25 crc kubenswrapper[4765]: I1210 08:09:25.516652 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-khqgp_3c5d628e-14a2-41d3-9d0b-b13465015fac/extract-content/0.log" Dec 10 08:09:25 crc kubenswrapper[4765]: I1210 08:09:25.535672 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-khqgp_3c5d628e-14a2-41d3-9d0b-b13465015fac/extract-utilities/0.log" Dec 10 08:09:25 crc kubenswrapper[4765]: I1210 08:09:25.755917 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hq7lm_14816080-6b8f-4858-9f45-636bfc8110bf/extract-utilities/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.015965 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hq7lm_14816080-6b8f-4858-9f45-636bfc8110bf/extract-utilities/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.044134 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-khqgp_3c5d628e-14a2-41d3-9d0b-b13465015fac/registry-server/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.064320 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hq7lm_14816080-6b8f-4858-9f45-636bfc8110bf/extract-content/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.104314 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hq7lm_14816080-6b8f-4858-9f45-636bfc8110bf/extract-content/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.224611 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hq7lm_14816080-6b8f-4858-9f45-636bfc8110bf/extract-content/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.226048 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hq7lm_14816080-6b8f-4858-9f45-636bfc8110bf/extract-utilities/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.341554 
4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-sxwbc_e5f2035f-97a3-4b63-ac9d-42ed0b201eec/marketplace-operator/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.495668 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hq7lm_14816080-6b8f-4858-9f45-636bfc8110bf/registry-server/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.539348 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-98gbr_ea399b94-8696-405b-956e-a807aca44b1c/extract-utilities/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.665174 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-98gbr_ea399b94-8696-405b-956e-a807aca44b1c/extract-utilities/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.726498 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-98gbr_ea399b94-8696-405b-956e-a807aca44b1c/extract-content/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.729837 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-98gbr_ea399b94-8696-405b-956e-a807aca44b1c/extract-content/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.909762 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-98gbr_ea399b94-8696-405b-956e-a807aca44b1c/extract-content/0.log" Dec 10 08:09:26 crc kubenswrapper[4765]: I1210 08:09:26.955601 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-98gbr_ea399b94-8696-405b-956e-a807aca44b1c/extract-utilities/0.log" Dec 10 08:09:27 crc kubenswrapper[4765]: I1210 08:09:27.001014 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-swxql_c0877bed-7b01-43a1-8704-eba99bb1e38d/extract-utilities/0.log" Dec 10 08:09:27 crc kubenswrapper[4765]: I1210 08:09:27.074821 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-98gbr_ea399b94-8696-405b-956e-a807aca44b1c/registry-server/0.log" Dec 10 08:09:27 crc kubenswrapper[4765]: I1210 08:09:27.141652 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-swxql_c0877bed-7b01-43a1-8704-eba99bb1e38d/extract-utilities/0.log" Dec 10 08:09:27 crc kubenswrapper[4765]: I1210 08:09:27.161907 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-swxql_c0877bed-7b01-43a1-8704-eba99bb1e38d/extract-content/0.log" Dec 10 08:09:27 crc kubenswrapper[4765]: I1210 08:09:27.218652 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-swxql_c0877bed-7b01-43a1-8704-eba99bb1e38d/extract-content/0.log" Dec 10 08:09:27 crc kubenswrapper[4765]: I1210 08:09:27.422401 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-swxql_c0877bed-7b01-43a1-8704-eba99bb1e38d/extract-content/0.log" Dec 10 08:09:27 crc kubenswrapper[4765]: I1210 08:09:27.425446 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-swxql_c0877bed-7b01-43a1-8704-eba99bb1e38d/extract-utilities/0.log" Dec 10 08:09:28 crc kubenswrapper[4765]: I1210 08:09:28.030683 4765 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-swxql_c0877bed-7b01-43a1-8704-eba99bb1e38d/registry-server/0.log" Dec 10 08:10:40 crc kubenswrapper[4765]: I1210 08:10:40.101770 4765 generic.go:334] "Generic (PLEG): container finished" podID="f5fe2052-71f7-4aa3-a719-de45bd724fb3" containerID="2bdc3c8f9e0bb315dcf24204b5cab4779d09a6c34cdd2edd8396b40c148f4354" exitCode=0 Dec 10 08:10:40 crc kubenswrapper[4765]: I1210 08:10:40.101849 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v29rk/must-gather-zbnvv" event={"ID":"f5fe2052-71f7-4aa3-a719-de45bd724fb3","Type":"ContainerDied","Data":"2bdc3c8f9e0bb315dcf24204b5cab4779d09a6c34cdd2edd8396b40c148f4354"} Dec 10 08:10:40 crc kubenswrapper[4765]: I1210 08:10:40.103016 4765 scope.go:117] "RemoveContainer" containerID="2bdc3c8f9e0bb315dcf24204b5cab4779d09a6c34cdd2edd8396b40c148f4354" Dec 10 08:10:41 crc kubenswrapper[4765]: I1210 08:10:41.095999 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-v29rk_must-gather-zbnvv_f5fe2052-71f7-4aa3-a719-de45bd724fb3/gather/0.log" Dec 10 08:10:48 crc kubenswrapper[4765]: I1210 08:10:48.837368 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-v29rk/must-gather-zbnvv"] Dec 10 08:10:48 crc kubenswrapper[4765]: I1210 08:10:48.838330 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-v29rk/must-gather-zbnvv" podUID="f5fe2052-71f7-4aa3-a719-de45bd724fb3" containerName="copy" containerID="cri-o://15e9069211c8ee280609756e12c3b86da9c2242833e50dcd936d0067819cd8a2" gracePeriod=2 Dec 10 08:10:48 crc kubenswrapper[4765]: I1210 08:10:48.849163 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-v29rk/must-gather-zbnvv"] Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.172364 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-v29rk_must-gather-zbnvv_f5fe2052-71f7-4aa3-a719-de45bd724fb3/copy/0.log" Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.172916 4765 generic.go:334] "Generic (PLEG): container finished" podID="f5fe2052-71f7-4aa3-a719-de45bd724fb3" containerID="15e9069211c8ee280609756e12c3b86da9c2242833e50dcd936d0067819cd8a2" exitCode=143 Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.172981 4765 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c81f6e260fa58aa37e9c5dfa4fbcfbb211391e045ac95a20c422aa163655208" Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.210288 4765 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-v29rk_must-gather-zbnvv_f5fe2052-71f7-4aa3-a719-de45bd724fb3/copy/0.log" Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.212001 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-v29rk/must-gather-zbnvv" Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.373225 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2msd5\" (UniqueName: \"kubernetes.io/projected/f5fe2052-71f7-4aa3-a719-de45bd724fb3-kube-api-access-2msd5\") pod \"f5fe2052-71f7-4aa3-a719-de45bd724fb3\" (UID: \"f5fe2052-71f7-4aa3-a719-de45bd724fb3\") " Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.373294 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f5fe2052-71f7-4aa3-a719-de45bd724fb3-must-gather-output\") pod \"f5fe2052-71f7-4aa3-a719-de45bd724fb3\" (UID: \"f5fe2052-71f7-4aa3-a719-de45bd724fb3\") " Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.379947 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5fe2052-71f7-4aa3-a719-de45bd724fb3-kube-api-access-2msd5" (OuterVolumeSpecName: "kube-api-access-2msd5") pod "f5fe2052-71f7-4aa3-a719-de45bd724fb3" (UID: "f5fe2052-71f7-4aa3-a719-de45bd724fb3"). InnerVolumeSpecName "kube-api-access-2msd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.473458 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5fe2052-71f7-4aa3-a719-de45bd724fb3-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "f5fe2052-71f7-4aa3-a719-de45bd724fb3" (UID: "f5fe2052-71f7-4aa3-a719-de45bd724fb3"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.475127 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2msd5\" (UniqueName: \"kubernetes.io/projected/f5fe2052-71f7-4aa3-a719-de45bd724fb3-kube-api-access-2msd5\") on node \"crc\" DevicePath \"\"" Dec 10 08:10:49 crc kubenswrapper[4765]: I1210 08:10:49.475158 4765 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f5fe2052-71f7-4aa3-a719-de45bd724fb3-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 10 08:10:50 crc kubenswrapper[4765]: I1210 08:10:50.181108 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-v29rk/must-gather-zbnvv" Dec 10 08:10:50 crc kubenswrapper[4765]: I1210 08:10:50.599729 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5fe2052-71f7-4aa3-a719-de45bd724fb3" path="/var/lib/kubelet/pods/f5fe2052-71f7-4aa3-a719-de45bd724fb3/volumes" Dec 10 08:11:04 crc kubenswrapper[4765]: I1210 08:11:04.049589 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 08:11:04 crc kubenswrapper[4765]: I1210 08:11:04.050178 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 08:11:34 crc kubenswrapper[4765]: I1210 08:11:34.049494 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 08:11:34 crc kubenswrapper[4765]: I1210 08:11:34.049936 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.892404 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zxvvj"] Dec 10 08:11:38 crc kubenswrapper[4765]: E1210 08:11:38.893414 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5fe2052-71f7-4aa3-a719-de45bd724fb3" containerName="copy" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.893430 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5fe2052-71f7-4aa3-a719-de45bd724fb3" containerName="copy" Dec 10 08:11:38 crc kubenswrapper[4765]: E1210 08:11:38.893442 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" containerName="extract-utilities" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.893449 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" containerName="extract-utilities" Dec 10 08:11:38 crc kubenswrapper[4765]: E1210 08:11:38.893492 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" containerName="extract-content" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.893501 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" containerName="extract-content" Dec 10 08:11:38 crc kubenswrapper[4765]: E1210 08:11:38.893513 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" containerName="registry-server" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.893521 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" 
containerName="registry-server" Dec 10 08:11:38 crc kubenswrapper[4765]: E1210 08:11:38.893532 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5fe2052-71f7-4aa3-a719-de45bd724fb3" containerName="gather" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.893540 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5fe2052-71f7-4aa3-a719-de45bd724fb3" containerName="gather" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.893727 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="eef9ed53-f133-442e-93cc-3a37a1daa6e7" containerName="registry-server" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.893741 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5fe2052-71f7-4aa3-a719-de45bd724fb3" containerName="gather" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.893752 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5fe2052-71f7-4aa3-a719-de45bd724fb3" containerName="copy" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.895239 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.905595 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zxvvj"] Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.944431 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-catalog-content\") pod \"redhat-operators-zxvvj\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.944533 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-utilities\") pod \"redhat-operators-zxvvj\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:38 crc kubenswrapper[4765]: I1210 08:11:38.944572 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwnc9\" (UniqueName: \"kubernetes.io/projected/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-kube-api-access-kwnc9\") pod \"redhat-operators-zxvvj\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:39 crc kubenswrapper[4765]: I1210 08:11:39.046270 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwnc9\" (UniqueName: \"kubernetes.io/projected/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-kube-api-access-kwnc9\") pod \"redhat-operators-zxvvj\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:39 crc kubenswrapper[4765]: I1210 08:11:39.046429 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-catalog-content\") pod \"redhat-operators-zxvvj\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:39 crc kubenswrapper[4765]: I1210 08:11:39.047041 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-catalog-content\") pod \"redhat-operators-zxvvj\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:39 crc kubenswrapper[4765]: I1210 08:11:39.047444 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-utilities\") pod \"redhat-operators-zxvvj\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:39 crc kubenswrapper[4765]: I1210 08:11:39.047489 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-utilities\") pod \"redhat-operators-zxvvj\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:39 crc kubenswrapper[4765]: I1210 08:11:39.066262 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwnc9\" (UniqueName: \"kubernetes.io/projected/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-kube-api-access-kwnc9\") pod \"redhat-operators-zxvvj\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:39 crc kubenswrapper[4765]: I1210 08:11:39.216699 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:39 crc kubenswrapper[4765]: I1210 08:11:39.659578 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zxvvj"] Dec 10 08:11:40 crc kubenswrapper[4765]: I1210 08:11:40.580214 4765 generic.go:334] "Generic (PLEG): container finished" podID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerID="266e2916a7b013e4177e4e68240b87eb42aaafb64d4857f8ba55a9b0d82a4113" exitCode=0 Dec 10 08:11:40 crc kubenswrapper[4765]: I1210 08:11:40.580269 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxvvj" event={"ID":"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1","Type":"ContainerDied","Data":"266e2916a7b013e4177e4e68240b87eb42aaafb64d4857f8ba55a9b0d82a4113"} Dec 10 08:11:40 crc kubenswrapper[4765]: I1210 08:11:40.580329 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxvvj" event={"ID":"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1","Type":"ContainerStarted","Data":"8c277ac13a42812f35dcd61be112beed2a98fe84bc4048cf01103a29782eae06"} Dec 10 08:11:40 crc kubenswrapper[4765]: I1210 08:11:40.582306 4765 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 08:11:41 crc kubenswrapper[4765]: I1210 08:11:41.592137 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxvvj" event={"ID":"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1","Type":"ContainerStarted","Data":"3adcb8cdcec31bd0a8856f7ee9d9105561e7bd2d2673b5201d46b48aa6ea6f13"} Dec 10 08:11:42 crc kubenswrapper[4765]: I1210 08:11:42.602588 4765 generic.go:334] "Generic (PLEG): container finished" podID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerID="3adcb8cdcec31bd0a8856f7ee9d9105561e7bd2d2673b5201d46b48aa6ea6f13" exitCode=0 Dec 10 08:11:42 crc kubenswrapper[4765]: I1210 08:11:42.602966 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxvvj" 
event={"ID":"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1","Type":"ContainerDied","Data":"3adcb8cdcec31bd0a8856f7ee9d9105561e7bd2d2673b5201d46b48aa6ea6f13"} Dec 10 08:11:43 crc kubenswrapper[4765]: I1210 08:11:43.615914 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxvvj" event={"ID":"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1","Type":"ContainerStarted","Data":"d2d5d1ab70a5eae5390f7725ad17667fe71100bb88975d41435bbedfde56a0d1"} Dec 10 08:11:43 crc kubenswrapper[4765]: I1210 08:11:43.633568 4765 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zxvvj" podStartSLOduration=3.059143192 podStartE2EDuration="5.633539149s" podCreationTimestamp="2025-12-10 08:11:38 +0000 UTC" firstStartedPulling="2025-12-10 08:11:40.581972794 +0000 UTC m=+5020.308638110" lastFinishedPulling="2025-12-10 08:11:43.156368751 +0000 UTC m=+5022.883034067" observedRunningTime="2025-12-10 08:11:43.630215864 +0000 UTC m=+5023.356881180" watchObservedRunningTime="2025-12-10 08:11:43.633539149 +0000 UTC m=+5023.360204475" Dec 10 08:11:49 crc kubenswrapper[4765]: I1210 08:11:49.217262 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:49 crc kubenswrapper[4765]: I1210 08:11:49.217879 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:49 crc kubenswrapper[4765]: I1210 08:11:49.258387 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:49 crc kubenswrapper[4765]: I1210 08:11:49.701892 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:49 crc kubenswrapper[4765]: I1210 08:11:49.747605 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zxvvj"] Dec 10 08:11:51 crc kubenswrapper[4765]: I1210 08:11:51.675314 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zxvvj" podUID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerName="registry-server" containerID="cri-o://d2d5d1ab70a5eae5390f7725ad17667fe71100bb88975d41435bbedfde56a0d1" gracePeriod=2 Dec 10 08:11:53 crc kubenswrapper[4765]: I1210 08:11:53.694982 4765 generic.go:334] "Generic (PLEG): container finished" podID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerID="d2d5d1ab70a5eae5390f7725ad17667fe71100bb88975d41435bbedfde56a0d1" exitCode=0 Dec 10 08:11:53 crc kubenswrapper[4765]: I1210 08:11:53.695173 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxvvj" event={"ID":"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1","Type":"ContainerDied","Data":"d2d5d1ab70a5eae5390f7725ad17667fe71100bb88975d41435bbedfde56a0d1"} Dec 10 08:11:53 crc kubenswrapper[4765]: I1210 08:11:53.885670 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.023576 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-catalog-content\") pod \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.023687 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwnc9\" (UniqueName: \"kubernetes.io/projected/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-kube-api-access-kwnc9\") pod \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.023852 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-utilities\") pod \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\" (UID: \"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1\") " Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.024677 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-utilities" (OuterVolumeSpecName: "utilities") pod "9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" (UID: "9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.029097 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-kube-api-access-kwnc9" (OuterVolumeSpecName: "kube-api-access-kwnc9") pod "9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" (UID: "9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1"). InnerVolumeSpecName "kube-api-access-kwnc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.125274 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwnc9\" (UniqueName: \"kubernetes.io/projected/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-kube-api-access-kwnc9\") on node \"crc\" DevicePath \"\"" Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.125314 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.138683 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" (UID: "9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.227051 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.705762 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxvvj" event={"ID":"9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1","Type":"ContainerDied","Data":"8c277ac13a42812f35dcd61be112beed2a98fe84bc4048cf01103a29782eae06"} Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.705835 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zxvvj" Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.706196 4765 scope.go:117] "RemoveContainer" containerID="d2d5d1ab70a5eae5390f7725ad17667fe71100bb88975d41435bbedfde56a0d1" Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.730458 4765 scope.go:117] "RemoveContainer" containerID="3adcb8cdcec31bd0a8856f7ee9d9105561e7bd2d2673b5201d46b48aa6ea6f13" Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.733042 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zxvvj"] Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.741837 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zxvvj"] Dec 10 08:11:54 crc kubenswrapper[4765]: I1210 08:11:54.751978 4765 scope.go:117] "RemoveContainer" containerID="266e2916a7b013e4177e4e68240b87eb42aaafb64d4857f8ba55a9b0d82a4113" Dec 10 08:11:56 crc kubenswrapper[4765]: I1210 08:11:56.600952 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" path="/var/lib/kubelet/pods/9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1/volumes" Dec 10 08:12:04 crc kubenswrapper[4765]: I1210 08:12:04.049721 4765 patch_prober.go:28] interesting pod/machine-config-daemon-xlv8w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 08:12:04 crc kubenswrapper[4765]: I1210 08:12:04.050198 4765 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 08:12:04 crc kubenswrapper[4765]: I1210 08:12:04.050261 4765 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" Dec 10 08:12:04 crc kubenswrapper[4765]: I1210 08:12:04.050872 4765 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086"} pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 08:12:04 crc kubenswrapper[4765]: I1210 08:12:04.050926 4765 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerName="machine-config-daemon" containerID="cri-o://c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086" gracePeriod=600 Dec 10 08:12:04 crc kubenswrapper[4765]: E1210 08:12:04.182392 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:12:04 crc kubenswrapper[4765]: I1210 08:12:04.783875 4765 generic.go:334] "Generic (PLEG): container finished" podID="d0714d9a-e9d5-4aca-8341-a073849f9234" containerID="c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086" exitCode=0 Dec 10 08:12:04 crc kubenswrapper[4765]: I1210 08:12:04.783965 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" event={"ID":"d0714d9a-e9d5-4aca-8341-a073849f9234","Type":"ContainerDied","Data":"c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086"} Dec 10 08:12:04 crc kubenswrapper[4765]: I1210 08:12:04.784006 4765 scope.go:117] "RemoveContainer" containerID="36428c9b79bcd275e10b5f0f6d13bce947dacd2ed8442b694ef4b92da1180efd" Dec 10 08:12:04 crc kubenswrapper[4765]: I1210 08:12:04.784557 4765 scope.go:117] "RemoveContainer" containerID="c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086" Dec 10 08:12:04 crc kubenswrapper[4765]: E1210 08:12:04.784768 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:12:19 crc kubenswrapper[4765]: I1210 08:12:19.588931 4765 scope.go:117] "RemoveContainer" containerID="c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086" Dec 10 08:12:19 crc kubenswrapper[4765]: E1210 08:12:19.589515 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.286265 4765 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6dws7"] Dec 10 08:12:26 crc kubenswrapper[4765]: E1210 08:12:26.287118 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerName="registry-server" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.287132 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerName="registry-server" Dec 10 08:12:26 crc kubenswrapper[4765]: E1210 08:12:26.287142 4765 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerName="extract-utilities" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.287149 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerName="extract-utilities" Dec 10 08:12:26 crc kubenswrapper[4765]: E1210 08:12:26.287174 4765 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerName="extract-content" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.287182 4765 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerName="extract-content" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.287336 4765 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cd9ccc2-93b9-454c-b16f-b2cdbfb32be1" containerName="registry-server" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.288575 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.295144 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6dws7"] Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.422608 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-catalog-content\") pod \"redhat-marketplace-6dws7\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.422863 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-utilities\") pod \"redhat-marketplace-6dws7\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.423002 4765 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxb6m\" (UniqueName: \"kubernetes.io/projected/55125a9b-9de2-4625-b298-7235008fa05c-kube-api-access-wxb6m\") pod \"redhat-marketplace-6dws7\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.524551 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-catalog-content\") pod \"redhat-marketplace-6dws7\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.524702 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-utilities\") pod \"redhat-marketplace-6dws7\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.524768 4765 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxb6m\" (UniqueName: \"kubernetes.io/projected/55125a9b-9de2-4625-b298-7235008fa05c-kube-api-access-wxb6m\") pod 
\"redhat-marketplace-6dws7\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.525347 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-catalog-content\") pod \"redhat-marketplace-6dws7\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.525458 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-utilities\") pod \"redhat-marketplace-6dws7\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.552109 4765 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxb6m\" (UniqueName: \"kubernetes.io/projected/55125a9b-9de2-4625-b298-7235008fa05c-kube-api-access-wxb6m\") pod \"redhat-marketplace-6dws7\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:26 crc kubenswrapper[4765]: I1210 08:12:26.608750 4765 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:27 crc kubenswrapper[4765]: I1210 08:12:27.044573 4765 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6dws7"] Dec 10 08:12:27 crc kubenswrapper[4765]: I1210 08:12:27.985145 4765 generic.go:334] "Generic (PLEG): container finished" podID="55125a9b-9de2-4625-b298-7235008fa05c" containerID="7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc" exitCode=0 Dec 10 08:12:27 crc kubenswrapper[4765]: I1210 08:12:27.985363 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dws7" event={"ID":"55125a9b-9de2-4625-b298-7235008fa05c","Type":"ContainerDied","Data":"7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc"} Dec 10 08:12:27 crc kubenswrapper[4765]: I1210 08:12:27.985729 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dws7" event={"ID":"55125a9b-9de2-4625-b298-7235008fa05c","Type":"ContainerStarted","Data":"1eb473f4f10cac2cb3c738cbb7cbcbf6a1adc55fb1f3563937607d46e72fe7bf"} Dec 10 08:12:30 crc kubenswrapper[4765]: I1210 08:12:30.002131 4765 generic.go:334] "Generic (PLEG): container finished" podID="55125a9b-9de2-4625-b298-7235008fa05c" containerID="2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277" exitCode=0 Dec 10 08:12:30 crc kubenswrapper[4765]: I1210 08:12:30.002216 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dws7" event={"ID":"55125a9b-9de2-4625-b298-7235008fa05c","Type":"ContainerDied","Data":"2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277"} Dec 10 08:12:31 crc kubenswrapper[4765]: I1210 08:12:31.009826 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dws7" event={"ID":"55125a9b-9de2-4625-b298-7235008fa05c","Type":"ContainerStarted","Data":"ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929"} Dec 10 08:12:31 crc kubenswrapper[4765]: I1210 08:12:31.038762 4765 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6dws7" podStartSLOduration=2.52941431 podStartE2EDuration="5.038734742s" podCreationTimestamp="2025-12-10 08:12:26 +0000 UTC" firstStartedPulling="2025-12-10 08:12:27.985946478 +0000 UTC m=+5067.712611794" lastFinishedPulling="2025-12-10 08:12:30.49526691 +0000 UTC m=+5070.221932226" observedRunningTime="2025-12-10 08:12:31.032929787 +0000 UTC m=+5070.759595113" watchObservedRunningTime="2025-12-10 08:12:31.038734742 +0000 UTC m=+5070.765400058" Dec 10 08:12:34 crc kubenswrapper[4765]: I1210 08:12:34.589794 4765 scope.go:117] "RemoveContainer" containerID="c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086" Dec 10 08:12:34 crc kubenswrapper[4765]: E1210 08:12:34.590423 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:12:36 crc kubenswrapper[4765]: I1210 08:12:36.609378 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:36 crc kubenswrapper[4765]: I1210 08:12:36.609727 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:36 crc kubenswrapper[4765]: I1210 08:12:36.651012 4765 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:37 crc kubenswrapper[4765]: I1210 08:12:37.098819 4765 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:37 crc kubenswrapper[4765]: I1210 08:12:37.156905 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6dws7"] Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.067709 4765 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6dws7" podUID="55125a9b-9de2-4625-b298-7235008fa05c" containerName="registry-server" containerID="cri-o://ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929" gracePeriod=2 Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.457697 4765 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.522067 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-catalog-content\") pod \"55125a9b-9de2-4625-b298-7235008fa05c\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.522139 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxb6m\" (UniqueName: \"kubernetes.io/projected/55125a9b-9de2-4625-b298-7235008fa05c-kube-api-access-wxb6m\") pod \"55125a9b-9de2-4625-b298-7235008fa05c\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.522169 4765 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-utilities\") pod \"55125a9b-9de2-4625-b298-7235008fa05c\" (UID: \"55125a9b-9de2-4625-b298-7235008fa05c\") " Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.523355 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-utilities" (OuterVolumeSpecName: "utilities") pod "55125a9b-9de2-4625-b298-7235008fa05c" (UID: "55125a9b-9de2-4625-b298-7235008fa05c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.527591 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55125a9b-9de2-4625-b298-7235008fa05c-kube-api-access-wxb6m" (OuterVolumeSpecName: "kube-api-access-wxb6m") pod "55125a9b-9de2-4625-b298-7235008fa05c" (UID: "55125a9b-9de2-4625-b298-7235008fa05c"). InnerVolumeSpecName "kube-api-access-wxb6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.543271 4765 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55125a9b-9de2-4625-b298-7235008fa05c" (UID: "55125a9b-9de2-4625-b298-7235008fa05c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.623417 4765 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.623453 4765 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxb6m\" (UniqueName: \"kubernetes.io/projected/55125a9b-9de2-4625-b298-7235008fa05c-kube-api-access-wxb6m\") on node \"crc\" DevicePath \"\"" Dec 10 08:12:39 crc kubenswrapper[4765]: I1210 08:12:39.623464 4765 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55125a9b-9de2-4625-b298-7235008fa05c-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.079573 4765 generic.go:334] "Generic (PLEG): container finished" podID="55125a9b-9de2-4625-b298-7235008fa05c" containerID="ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929" exitCode=0 Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.079683 4765 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6dws7" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.079627 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dws7" event={"ID":"55125a9b-9de2-4625-b298-7235008fa05c","Type":"ContainerDied","Data":"ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929"} Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.080621 4765 scope.go:117] "RemoveContainer" containerID="ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.080654 4765 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dws7" event={"ID":"55125a9b-9de2-4625-b298-7235008fa05c","Type":"ContainerDied","Data":"1eb473f4f10cac2cb3c738cbb7cbcbf6a1adc55fb1f3563937607d46e72fe7bf"} Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.105873 4765 scope.go:117] "RemoveContainer" containerID="2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.108538 4765 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6dws7"] Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.115134 4765 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6dws7"] Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.125888 4765 scope.go:117] "RemoveContainer" containerID="7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.154070 4765 scope.go:117] "RemoveContainer" containerID="ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929" Dec 10 08:12:40 crc kubenswrapper[4765]: E1210 08:12:40.155599 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929\": container with ID starting with ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929 not found: ID does not exist" containerID="ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.155666 4765 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929"} err="failed to get container status \"ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929\": rpc error: code = NotFound desc = could not find container \"ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929\": container with ID starting with ade4f1cdf16a181a90a7214eb897b937b7d43cc0577938bdd34f2ed0549d4929 not found: ID does not exist" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.155704 4765 scope.go:117] "RemoveContainer" containerID="2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277" Dec 10 08:12:40 crc kubenswrapper[4765]: E1210 08:12:40.156302 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277\": container with ID starting with 2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277 not found: ID does not exist" containerID="2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.156329 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277"} err="failed to get container status \"2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277\": rpc error: code = NotFound desc = could not find container \"2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277\": container with ID starting with 2296b022252aa41f6b3f735246b14f62dc3a09e49cbbcabb7096d856f5548277 not found: ID does not exist" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.156344 4765 scope.go:117] "RemoveContainer" containerID="7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc" Dec 10 08:12:40 crc kubenswrapper[4765]: E1210 08:12:40.156752 4765 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc\": container with ID starting with 7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc not found: ID does not exist" containerID="7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.156772 4765 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc"} err="failed to get container status \"7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc\": rpc error: code = NotFound desc = could not find container \"7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc\": container with ID starting with 7322010477dc7c62456e1e5fefe02f51d63636e55a409eb93534d792110c12dc not found: ID does not exist" Dec 10 08:12:40 crc kubenswrapper[4765]: I1210 08:12:40.599601 4765 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55125a9b-9de2-4625-b298-7235008fa05c" path="/var/lib/kubelet/pods/55125a9b-9de2-4625-b298-7235008fa05c/volumes" Dec 10 08:12:48 crc kubenswrapper[4765]: I1210 08:12:48.589811 4765 scope.go:117] "RemoveContainer" containerID="c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086" Dec 10 08:12:48 crc kubenswrapper[4765]: E1210 08:12:48.593446 4765 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:13:01 crc kubenswrapper[4765]: I1210 08:13:01.588903 4765 scope.go:117] "RemoveContainer" containerID="c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086" Dec 10 08:13:01 crc kubenswrapper[4765]: E1210 08:13:01.590487 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:13:15 crc kubenswrapper[4765]: I1210 08:13:15.589519 4765 scope.go:117] "RemoveContainer" containerID="c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086" Dec 10 08:13:15 crc kubenswrapper[4765]: E1210 08:13:15.590382 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:13:26 crc kubenswrapper[4765]: I1210 08:13:26.589345 4765 scope.go:117] "RemoveContainer" containerID="c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086" Dec 10 08:13:26 crc kubenswrapper[4765]: E1210 08:13:26.590243 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" Dec 10 08:13:30 crc kubenswrapper[4765]: I1210 08:13:30.655404 4765 scope.go:117] "RemoveContainer" containerID="15e9069211c8ee280609756e12c3b86da9c2242833e50dcd936d0067819cd8a2" Dec 10 08:13:30 crc kubenswrapper[4765]: I1210 08:13:30.679967 4765 scope.go:117] "RemoveContainer" containerID="2bdc3c8f9e0bb315dcf24204b5cab4779d09a6c34cdd2edd8396b40c148f4354" Dec 10 08:13:41 crc kubenswrapper[4765]: I1210 08:13:41.589515 4765 scope.go:117] "RemoveContainer" containerID="c88d51964b2207dc5d35d2de1788775a0c483596c0b2b783bdb58cfed6375086" Dec 10 08:13:41 crc kubenswrapper[4765]: E1210 08:13:41.590463 4765 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xlv8w_openshift-machine-config-operator(d0714d9a-e9d5-4aca-8341-a073849f9234)\"" pod="openshift-machine-config-operator/machine-config-daemon-xlv8w" podUID="d0714d9a-e9d5-4aca-8341-a073849f9234" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515116225674024456 0ustar 